library(rms)
## Warning: 패키지 'rms'는 R 버전 4.3.3에서 작성되었습니다
## 필요한 패키지를 로딩중입니다: Hmisc
## Warning: 패키지 'Hmisc'는 R 버전 4.3.3에서 작성되었습니다
## 
## 다음의 패키지를 부착합니다: 'Hmisc'
## The following objects are masked from 'package:base':
## 
##     format.pval, units
## Warning in .recacheSubclasses(def@className, def, env): 클래스 "replValueSp"의
## 서브 클래스 "ndiMatrix"가 정의되지 않았습니다; 업데이트된 정의가 아닙니다
library(e1071)
## Warning: 패키지 'e1071'는 R 버전 4.3.3에서 작성되었습니다
## 
## 다음의 패키지를 부착합니다: 'e1071'
## The following object is masked from 'package:Hmisc':
## 
##     impute
require(rjags)
## 필요한 패키지를 로딩중입니다: rjags
## Warning: 패키지 'rjags'는 R 버전 4.3.3에서 작성되었습니다
## 필요한 패키지를 로딩중입니다: coda
## Warning: 패키지 'coda'는 R 버전 4.3.3에서 작성되었습니다
## Linked to JAGS 4.3.1
## Loaded modules: basemod,bugs
require(runjags)
## 필요한 패키지를 로딩중입니다: runjags
## Warning: 패키지 'runjags'는 R 버전 4.3.3에서 작성되었습니다
require(coda)

## Data Load & Preprocessing -------------------------------
# Read the 2018 district-level ranking data set (EUC-KR encoded CSV with
# Korean column headers).  NOTE(review): absolute machine-specific path.
data18 <- read.csv("D:/0.KAIST_DHCSS(Master)/석사_1기/Bayesian Statistics/Project/data2018_순위.csv", fileEncoding = "euc-kr")

# Original (Korean) column names, captured console output preserved below.
colnames(data18)
##  [1] "시도명"                      "시군구명"                   
##  [3] "aver_per_rank"               "SIGNGU_CD"                  
##  [5] "대학교_수"                   "창조산업_수"                
##  [7] "음식점업_수"                 "공원_수"                    
##  [9] "인구십만명당_문화기반시설수" "인구_천명당_외국인_수"      
## [11] "수도권"                      "ratio_crea"
# Rename to the English identifiers used throughout the rest of the script.
colnames(data18) <- c("Si_do_NM", "Si-gun-gu_NM", "aver_per_rank", "SIGNGU_CD", "Study_uni", "Study_industries", "Rest_cafe",
                      "Rest_park", "Culture_building", "Culture_people", "Capital", "Creative_Class")
colnames(data18)
##  [1] "Si_do_NM"         "Si-gun-gu_NM"     "aver_per_rank"    "SIGNGU_CD"       
##  [5] "Study_uni"        "Study_industries" "Rest_cafe"        "Rest_park"       
##  [9] "Culture_building" "Culture_people"   "Capital"          "Creative_Class"
describe(data18) # no missing values (Hmisc::describe: 74 obs., 12 variables)
## data18 
## 
##  12  Variables      74  Observations
## --------------------------------------------------------------------------------
## Si_do_NM 
##        n  missing distinct 
##       74        0        7 
##                                                                             
## Value      광주광역시 대구광역시 대전광역시 부산광역시 서울특별시 울산광역시
## Frequency      5          8          5         16         25          5     
## Proportion 0.068      0.108      0.068      0.216      0.338      0.068     
##                      
## Value      인천광역시
## Frequency     10     
## Proportion 0.135     
## --------------------------------------------------------------------------------
## Si-gun-gu_NM 
##        n  missing distinct 
##       74        0       53 
## 
## lowest : 강남구   강동구   강북구   강서구   강화군  , highest: 은평구   종로구   중구     중랑구   해운대구
## --------------------------------------------------------------------------------
## aver_per_rank 
##        n  missing distinct     Info     Mean      Gmd      .05      .10 
##       74        0       73        1   0.4883   0.2075   0.1572   0.2555 
##      .25      .50      .75      .90      .95 
##   0.3748   0.5027   0.6145   0.7265   0.7758 
## 
## lowest : 0         0.0676778 0.0958904 0.143836  0.164384 
## highest: 0.773728  0.77966   0.785388  0.802348  0.90252  
## --------------------------------------------------------------------------------
## SIGNGU_CD 
##        n  missing distinct     Info     Mean      Gmd      .05      .10 
##       74        0       74        1    22446     8401    11210    11294 
##      .25      .50      .75      .90      .95 
##    11568    26455    28243    30161    31121 
## 
## lowest : 11110 11140 11170 11200 11215, highest: 31110 31140 31170 31200 31710
## --------------------------------------------------------------------------------
## Study_uni 
##        n  missing distinct     Info     Mean      Gmd 
##       74        0        7    0.946    1.676    1.861 
##                                                     
## Value          0     1     2     3     4     5     6
## Frequency     24    18    11     9     7     1     4
## Proportion 0.324 0.243 0.149 0.122 0.095 0.014 0.054
## 
## For the frequency table, variable is rounded to the nearest 0
## --------------------------------------------------------------------------------
## Study_industries 
##        n  missing distinct     Info     Mean      Gmd      .05      .10 
##       74        0       74        1     3372     2631    749.4   1085.6 
##      .25      .50      .75      .90      .95 
##   1680.0   2795.0   4086.0   6065.0   7739.9 
## 
## lowest :    84   399   503   698   777, highest:  7636  7933  7975 13096 21662
## --------------------------------------------------------------------------------
## Rest_cafe 
##        n  missing distinct     Info     Mean      Gmd      .05      .10 
##       74        0       74        1     2275     1372    834.4    993.1 
##      .25      .50      .75      .90      .95 
##   1386.2   2072.0   2832.8   3712.5   4550.5 
## 
## lowest :  177  272  504  707  903, highest: 4448 4741 4854 5386 8598
## --------------------------------------------------------------------------------
## Rest_park 
##        n  missing distinct     Info     Mean      Gmd      .05      .10 
##       74        0       62        1    88.07    62.61     9.95    20.80 
##      .25      .50      .75      .90      .95 
##    49.00    80.50   120.25   167.50   189.45 
## 
## lowest :   0   4   8  11  12, highest: 187 194 212 214 256
## --------------------------------------------------------------------------------
## Culture_building 
##        n  missing distinct     Info     Mean      Gmd      .05      .10 
##       74        0       43    0.999    4.936    4.006    1.800    2.060 
##      .25      .50      .75      .90      .95 
##    2.525    3.300    4.575    9.020   14.115 
## 
## lowest : 1.2  1.7  1.8  1.9  2   , highest: 13.1 16   18   19.1 41.2
## --------------------------------------------------------------------------------
## Culture_people 
##        n  missing distinct     Info     Mean      Gmd      .05      .10 
##       74        0       73        1    22.42    19.79    5.506    6.650 
##      .25      .50      .75      .90      .95 
##    9.463   13.645   27.025   43.447   73.259 
## 
## lowest : 3.52  3.78  3.79  5.37  5.58 , highest: 70.27 78.81 84.03 85.95 97.4 
## --------------------------------------------------------------------------------
## Capital 
##        n  missing distinct     Info      Sum     Mean      Gmd 
##       74        0        2    0.748       35    0.473   0.5054 
## 
## --------------------------------------------------------------------------------
## Creative_Class 
##        n  missing distinct     Info     Mean      Gmd      .05      .10 
##       74        0       74        1   0.1753   0.1541  0.06140  0.06633 
##      .25      .50      .75      .90      .95 
##  0.08302  0.10070  0.17143  0.35209  0.51408 
## 
## lowest : 0.0282373 0.0571838 0.0573348 0.0610268 0.0615934
## highest: 0.48979   0.55918   0.660765  0.698749  1.33565  
## --------------------------------------------------------------------------------
# Columns to be transformed/scaled.
# Quick skewness screen: one histogram per predictor.  FIX(review): the
# original passed the bare, unrounded skewness number as xlab; label and
# round it so the axis is readable.  Plotted data are unchanged.
for (j in c("Study_uni", "Study_industries", "Rest_cafe",
            "Rest_park", "Culture_building", "Culture_people", "Creative_Class"))
{
  hist(data18[, j], main = j,
       xlab = paste0("skewness = ", round(skewness(data18[, j]), 3)))
}

# Predictors are heavily skewed.
# Standardization or centering: use 'scale()'.
# Yeo-Johnson transformation instead, via bestNormalize::yeojohnson().
# install.packages('bestNormalize')
library(bestNormalize) # Study_uni & Culture_building 
## Warning: 패키지 'bestNormalize'는 R 버전 4.3.3에서 작성되었습니다
# Transform each predictor column in place; $x.t holds the transformed
# values.  Each column is handled independently, same as transforming them
# one assignment at a time.
for (col_nm in c("Study_uni", "Study_industries", "Rest_cafe",
                 "Rest_park", "Culture_building", "Culture_people",
                 "Creative_Class")) {
  data18[[col_nm]] <- yeojohnson(data18[[col_nm]])$x.t
}

# Re-check skewness after the Yeo-Johnson transforms (histogram per column,
# sample skewness shown as the x-axis label, as before).
trans_cols <- c("Study_uni", "Study_industries", "Rest_cafe",
                "Rest_park", "Culture_building", "Culture_people",
                "Creative_Class")
for (j in trans_cols) {
  hist(data18[, j], main = j,
       xlab = skewness(data18[, j]))
}

# Predictor matrix (transformed columns only)
X = data18[, trans_cols]

## Modeling ----------------------
# Pull the response and the seven (transformed) predictors out as plain
# vectors, then bundle them into the data list handed to JAGS.
myData <- data18
y  <- myData$aver_per_rank
x1 <- myData$Study_uni
x2 <- myData$Study_industries
x3 <- myData$Rest_cafe
x4 <- myData$Rest_park
x5 <- myData$Culture_building
x6 <- myData$Culture_people
x7 <- myData$Creative_Class
nn <- length(y)

dataList <- list(y = y,
                 x1 = x1, x2 = x2, x3 = x3, x4 = x4,
                 x5 = x5, x6 = x6, x7 = x7,
                 Ntotal = nn)

# M1_2018 (robust Student-t regression) -------------------------------------
# FIX(review): this line was the bare symbol `M1_2018`, which is never
# defined and errors when the script is sourced; it reads as a section label.

# Define the model with no individual differences.
# Robust Bayesian linear regression: Student-t likelihood with unknown
# degrees of freedom nu, linear mean in the 7 predictors, Laplace (ddexp)
# shrinkage priors on the slopes, weak normal prior on the intercept.
# NOTE(review): sigma ~ dgamma(0.01, 0.01) places the gamma prior on sigma
# itself while the likelihood uses precision 1/sigma^2 — confirm intended
# (a gamma prior is more commonly placed on the precision).
modelString_M1_2018 = "
  model{
    for (i in 1:Ntotal){
      y[i] ~ dt(b0 + 
      b1*x1[i] + b2*x2[i] + b3*x3[i] + b4*x4[i] +
      b5*x5[i] + b6*x6[i] + b7*x7[i], 1/sigma^2, nu)
    }
      
    # priors vague 
    b0 ~ dnorm(0, 1/2^2)
    b1 ~ ddexp(0.0, sqrt(2.0))
    b2 ~ ddexp(0.0, sqrt(2.0))
    b3 ~ ddexp(0.0, sqrt(2.0))
    b4 ~ ddexp(0.0, sqrt(2.0))
    b5 ~ ddexp(0.0, sqrt(2.0))
    b6 ~ ddexp(0.0, sqrt(2.0))
    b7 ~ ddexp(0.0, sqrt(2.0))
    
    # Sigma
    sigma ~ dgamma(0.01, 0.01)
    nu ~ dexp(0.0333)
    }
"
# MCMC run: three chains with independent Uniform(0,1) starting values for
# every sampled parameter (note nu starts below 1; JAGS accepts any nu > 0).
# NOTE(review): no RNG seed is set, so inits and results vary run to run.
myinits_M12018 <- list (list(b0 = runif(1), b1 = runif(1), b2 = runif(1), b3 = runif(1),
                              b4 = runif(1), b5 = runif(1), b6 = runif(1), b7 = runif(1),
                              sigma = runif(1), nu = runif(1)),
                         list(b0 = runif(1), b1 = runif(1), b2 = runif(1), b3 = runif(1),
                              b4 = runif(1), b5 = runif(1), b6 = runif(1), b7 = runif(1),
                              sigma = runif(1), nu = runif(1)),
                         list(b0 = runif(1), b1 = runif(1), b2 = runif(1), b3 = runif(1),
                              b4 = runif(1), b5 = runif(1), b6 = runif(1), b7 = runif(1),
                              sigma = runif(1), nu = runif(1)))

# Fit the Student-t model: 500 adapt + 500 burn-in + 5000 stored samples per
# chain; monitor all coefficients, sigma, nu, and the DIC/pD diagnostics.
out_M12018 <- run.jags (model=modelString_M1_2018, data=dataList, inits=myinits_M12018,
                         n.chains=3,
                         adapt=500, burnin=500, sample=5000, monitor=c("b0","b1", "b2", "b3", "b4", 
                                                                       "b5","b6", "b7", "sigma", "nu", "pD", "DIC"))
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## basemod: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 3 is invalid
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## bugs: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 4 is invalid
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## basemod: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 3 is invalid
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## bugs: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 4 is invalid
## module dic loaded
## Compiling rjags model...
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## basemod: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 3 is invalid
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## bugs: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 4 is invalid
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## basemod: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 3 is invalid
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## bugs: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 4 is invalid
## Calling the simulation using the rjags method...
## Adapting the model for 500 iterations...
## Burning in the model for 500 iterations...
## Running the model for 5000 iterations...
## Extending 5000 iterations for pD/DIC estimates...
## Simulation complete
## Calculating summary statistics...
## Calculating the Gelman-Rubin statistic for 10 variables....
## Finished running the simulation
print(out_M12018) # pD/DIC vary per run (no seed); this run: pD = 9.09414, DIC = -68.86187
## 
## JAGS model summary statistics from 15000 samples (chains = 3; adapt+burnin = 1000):
##                                                                                
##         Lower95    Median    Upper95      Mean       SD Mode      MCerr MC%ofSD
## b0      0.45396     0.487    0.52024    0.4871  0.01691   -- 0.00017953     1.1
## b1    -0.018029  0.016697   0.050766  0.016655 0.017363   -- 0.00021536     1.2
## b2     -0.23094  -0.11481 -0.0015333  -0.11505 0.058392   --   0.002683     4.6
## b3      0.12935   0.22729    0.32776   0.22708 0.050453   --  0.0020574     4.1
## b4    -0.081994 -0.035647   0.012861 -0.035666 0.024264   -- 0.00053944     2.2
## b5    -0.072231 -0.026842   0.016674 -0.027229  0.02278   -- 0.00049546     2.2
## b6    -0.030419 0.0060015   0.044041 0.0062538  0.01906   -- 0.00030134     1.6
## b7    -0.064489  0.005234   0.070718 0.0053164 0.034106   --  0.0010043     2.9
## sigma   0.11174   0.13713    0.16406    0.1379 0.013406   -- 0.00018149     1.4
## nu       2.5495    31.033     103.85    40.286   31.757   --    0.55861     1.8
##                             
##       SSeff     AC.10   psrf
## b0     8872 -0.012356 1.0003
## b1     6500   0.02474 1.0002
## b2      474   0.53112  1.004
## b3      601   0.44946 1.0045
## b4     2023    0.1047 1.0001
## b5     2114   0.10852 1.0005
## b6     4001  0.051126 1.0007
## b7     1153   0.24828 1.0005
## sigma  5457 0.0004841 1.0003
## nu     3232  0.023679 1.0002
## 
## Model fit assessment:
## DIC = -68.86187
## [PED not available from the stored object]
## Estimated effective number of parameters:  pD = 9.09414
## 
## Total time taken: 48.1 seconds
plot(out_M12018) # trace/density/autocorrelation plots for all monitored parameters
## Generating plots...

## PVAF --------------------------
# Extract posterior draws from the runjags object.  For each monitored
# parameter we keep the mcmc.list (bkk_M1) and a plain numeric vector of the
# draws (bk_M1); unlist() concatenates the chains.
# BUG FIX: b2 was extracted with vars = "b", which partially matches several
# b-parameters, so b2_M1 did not hold the b2 draws; corrected to vars = "b2".
b00_M1 <- as.mcmc.list(out_M12018, vars = "b0")
b0_M1 <- as.numeric(unlist(b00_M1[, 1]))
b11_M1 <- as.mcmc.list(out_M12018, vars = "b1")
b1_M1 <- as.numeric(unlist(b11_M1[, 1]))
b22_M1 <- as.mcmc.list(out_M12018, vars = "b2")
b2_M1 <- as.numeric(unlist(b22_M1[, 1]))
b33_M1 <- as.mcmc.list(out_M12018, vars = "b3")
b3_M1 <- as.numeric(unlist(b33_M1[, 1]))
b44_M1 <- as.mcmc.list(out_M12018, vars = "b4")
b4_M1 <- as.numeric(unlist(b44_M1[, 1]))
b55_M1 <- as.mcmc.list(out_M12018, vars = "b5")
b5_M1 <- as.numeric(unlist(b55_M1[, 1]))
b66_M1 <- as.mcmc.list(out_M12018, vars = "b6")
b6_M1 <- as.numeric(unlist(b66_M1[, 1]))
b77_M1 <- as.mcmc.list(out_M12018, vars = "b7")
b7_M1 <- as.numeric(unlist(b77_M1[, 1]))

# Residual scale (sigma) and Student-t degrees of freedom (nu) draws
sigmas_M1 <- as.mcmc.list(out_M12018, vars = "sigma")
s_M1 <- as.numeric(unlist(sigmas_M1[, 1]))
nus_M1 <-  as.mcmc.list(out_M12018, vars = "nu")
nu_M1 <- as.numeric(unlist(nus_M1[, 1]))

# Posterior Predictive Check (PPC)
# Build X1..X7: evenly spaced 74-point grids spanning each predictor's
# observed range, used as x-axes for the predictive regression lines.
for (k in 1:7) {
  xv <- get(paste0("x", k))
  assign(paste0("X", k), seq(min(xv), max(xv), length.out = 74))
}


# Plot observed data
# Generate PPC-simulated points

nn <- length(y)  # Assuming x1 to x7 have the same length

par(mfrow = c(4, 2))  # 4 x 2 panel grid (7 of 8 cells used)

# PPC panels, one per predictor: scatter the observed (x_p, y), overlay one
# simulated replicate per observation, then draw 20 posterior-predictive
# regression lines and record the PVAF of each draw.
# FIX(review): the original reused `i` for both the panel loop and the inner
# draws loop (shadowing), and bound `text` as a global, masking base::text.
# Indices renamed (p, m) and the legend inlined; plotted output is unchanged.
for (p in 1:7) {
  # Predictor vector for this panel (x1, ..., x7)
  x_var <- get(paste0("x", p))

  # Observed data
  plot(x_var, y, col = "black", pch = 1, cex = 0.8,
       main = paste("Plot for x", p, sep = ""),
       xlab = "x values", ylab = "y")

  # One PPC replicate per observation, using every 10th stored draw of the
  # coefficients plus Student-t noise s * rt(1, nu).
  y_ppc <- rep(0, nn)
  for (j in 1:nn) {
    y_ppc[j] <- b0_M1[j*10] +
      b1_M1[j*10] * x1[j] +
      b2_M1[j*10] * x2[j] +
      b3_M1[j*10] * x3[j] +
      b4_M1[j*10] * x4[j] +
      b5_M1[j*10] * x5[j] +
      b6_M1[j*10] * x6[j] +
      b7_M1[j*10] * x7[j] + s_M1[j*10]*rt(1, nu_M1[j*10])
  }

  # Overlay simulated points and a legend
  points(x_var, y_ppc, col = "red", cex = 0.8, pch = 18)
  legend("topright", legend = c("Observed", "PPC-simulated"),
         pch = c(1, 18), col = c("black", "red"), cex = 0.8)

  # 20 posterior predictive regression lines (every 400th stored draw).
  # pvaf_M1 is recomputed identically in every panel; the standalone loop
  # after this block reports its mean.
  mm <- 20
  pvaf_M1 <- numeric(mm)
  X_var <- get(paste0("X", p))  # X1, ..., X7 grid for this panel
  for (m in 1:mm) {
    # Predictive line evaluated over the X grids
    ppd1 <- b0_M1[m*400] + 
      b1_M1[m*400] * X1 + 
      b2_M1[m*400] * X2 +
      b3_M1[m*400] * X3 +
      b4_M1[m*400] * X4 +
      b5_M1[m*400] * X5 +
      b6_M1[m*400] * X6 +
      b7_M1[m*400] * X7 

    # Fitted values at the observed predictors
    y.prd <- b0_M1[m*400] + 
      b1_M1[m*400] * x1 + 
      b2_M1[m*400] * x2 +
      b3_M1[m*400] * x3 +
      b4_M1[m*400] * x4 +
      b5_M1[m*400] * x5 +
      b6_M1[m*400] * x6 +
      b7_M1[m*400] * x7 

    # Proportion of variance accounted for by this draw
    pvaf_M1[m] <- 1 - sum((y - y.prd)^2) / sum((y - mean(y))^2)
    lines(X_var, ppd1, lwd = 1, col = "red")
  }
}

# Reset layout to default
par(mfrow = c(1, 1))

# PVAF over 20 thinned posterior draws (every 400th stored sample; b0_M1
# etc. hold the draws from all chains concatenated)
mm <- 20
pvaf_M1 <- rep(0, mm)
for (i in 1:mm) {
  d <- i * 400  # thinned draw index

  # Predictive line over the X grids (kept for parity with the plotting
  # loop above; not drawn here)
  ppd1 <- b0_M1[d] +
    b1_M1[d] * X1 + b2_M1[d] * X2 + b3_M1[d] * X3 +
    b4_M1[d] * X4 + b5_M1[d] * X5 + b6_M1[d] * X6 +
    b7_M1[d] * X7

  # Fitted values at the observed predictors
  y.prd <- b0_M1[d] +
    b1_M1[d] * x1 + b2_M1[d] * x2 + b3_M1[d] * x3 +
    b4_M1[d] * x4 + b5_M1[d] * x5 + b6_M1[d] * x6 +
    b7_M1[d] * x7

  pvaf_M1[i] <- 1 - sum((y - y.prd)^2) / sum((y - mean(y))^2)
}

# Display PVAF
cat('Mean Percent Variance Accounted For (PVAF):', mean(pvaf_M1), "\n")
## Mean Percent Variance Accounted For (PVAF): -9.264221
# Normality check for residuals (posterior-mean fit minus observed y)
residu <- mean(b0_M1) + mean(b1_M1)*x1 + mean(b2_M1)*x2 + mean(b3_M1)*x3 + 
  mean(b4_M1)*x4 + mean(b5_M1)*x5 + mean(b6_M1)*x6 + mean(b7_M1)*x7 - y
qqnorm(residu) ; qqline(residu)

hist(residu, breaks = 11)

# M1n_2018 (normal-likelihood model; coded below as "M1s") ------------------
# FIX(review): this line was the bare symbol `M1n_2018`, which is never
# defined and errors when the script is sourced; it reads as a section label.

# Define the model with no individual differences.
# Same linear mean and priors as M1, but a normal likelihood (no Student-t
# dof nu) — used to compare against the robust model via DIC.
# NOTE(review): as in M1, sigma ~ dgamma(0.01, 0.01) is a prior on sigma
# itself, not on the precision — confirm intended.
modelString_M1s_2018 = "
  model{
    for (i in 1:Ntotal){
      y[i] ~ dnorm(b0 + 
      b1*x1[i] + b2*x2[i] + b3*x3[i] + b4*x4[i] +
      b5*x5[i] + b6*x6[i] + b7*x7[i], 1/sigma^2)
    }
      
    # priors vague 
    b0 ~ dnorm(0, 1/2^2)
    b1 ~ ddexp(0.0, sqrt(2.0))
    b2 ~ ddexp(0.0, sqrt(2.0))
    b3 ~ ddexp(0.0, sqrt(2.0))
    b4 ~ ddexp(0.0, sqrt(2.0))
    b5 ~ ddexp(0.0, sqrt(2.0))
    b6 ~ ddexp(0.0, sqrt(2.0))
    b7 ~ ddexp(0.0, sqrt(2.0))
    
    # Sigma
    sigma ~ dgamma(0.01, 0.01)
      
    }
"
# MCMC run: three chains, Uniform(0,1) starting values for all sampled
# parameters (no nu here — the likelihood is normal).
# NOTE(review): no RNG seed is set, so results vary run to run.
myinits_M1s2018 <- list (list(b0 = runif(1), b1 = runif(1), b2 = runif(1), b3 = runif(1),
                         b4 = runif(1), b5 = runif(1), b6 = runif(1), b7 = runif(1),
                         sigma = runif(1)),
                    list(b0 = runif(1), b1 = runif(1), b2 = runif(1), b3 = runif(1),
                         b4 = runif(1), b5 = runif(1), b6 = runif(1), b7 = runif(1),
                         sigma = runif(1)),
                    list(b0 = runif(1), b1 = runif(1), b2 = runif(1), b3 = runif(1),
                         b4 = runif(1), b5 = runif(1), b6 = runif(1), b7 = runif(1),
                         sigma = runif(1)))

# Fit the normal-likelihood model with the same sampling schedule as M1.
out_M1s2018 <- run.jags (model=modelString_M1s_2018, data=dataList, inits=myinits_M1s2018,
                         n.chains=3,
                    adapt=500, burnin=500, sample=5000, monitor=c("b0","b1", "b2", "b3", "b4", 
                                                                  "b5","b6", "b7", "sigma", "pD", "DIC"))
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## basemod: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 3 is invalid
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## bugs: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 4 is invalid
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## basemod: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 3 is invalid
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## bugs: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 4 is invalid
## Compiling rjags model...
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## basemod: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 3 is invalid
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## bugs: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 4 is invalid
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## basemod: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 3 is invalid
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## bugs: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 4 is invalid
## Calling the simulation using the rjags method...
## Adapting the model for 500 iterations...
## Burning in the model for 500 iterations...
## Running the model for 5000 iterations...
## Extending 5000 iterations for pD/DIC estimates...
## Simulation complete
## Calculating summary statistics...
## Calculating the Gelman-Rubin statistic for 9 variables....
## Finished running the simulation
print(out_M1s2018) # pD/DIC vary per run (no seed); this run: pD = 9.21529, DIC = -69.28302
## 
## JAGS model summary statistics from 15000 samples (chains = 3; adapt+burnin = 1000):
##                                                                               
##         Lower95    Median   Upper95      Mean       SD Mode      MCerr MC%ofSD
## b0      0.45552   0.48813   0.52103   0.48823 0.016648   -- 0.00013599     0.8
## b1    -0.016821  0.017077  0.052443  0.017344 0.017625   -- 0.00021534     1.2
## b2     -0.21962  -0.11598 0.0010958  -0.11583 0.056381   --  0.0024841     4.4
## b3      0.12539   0.22776   0.32548   0.22816  0.05069   --  0.0020281       4
## b4     -0.08577  -0.03726  0.010917 -0.037091 0.024783   -- 0.00055348     2.2
## b5    -0.071008 -0.028593  0.016994 -0.028391 0.022397   -- 0.00046064     2.1
## b6    -0.031199  0.006125  0.042985 0.0059268 0.018822   -- 0.00029247     1.6
## b7    -0.059227 0.0050918   0.07485 0.0051367 0.034261   -- 0.00098895     2.9
## sigma   0.11833   0.14168   0.16806   0.14269  0.01278   -- 0.00015979     1.3
##                              
##       SSeff     AC.10    psrf
## b0    14986 -0.001489 0.99995
## b1     6699  0.023005  1.0005
## b2      515   0.50206  1.0054
## b3      625   0.44063  1.0036
## b4     2005  0.093959  1.0014
## b5     2364  0.093767   1.001
## b6     4142  0.044273  1.0009
## b7     1200   0.22013  1.0022
## sigma  6396  0.018814  1.0001
## 
## Model fit assessment:
## DIC = -69.28302
## [PED not available from the stored object]
## Estimated effective number of parameters:  pD = 9.21529
## 
## Total time taken: 9 seconds
plot(out_M1s2018) # trace/density/autocorrelation plots for all monitored parameters
## Generating plots...

## PVAF --------------------------
# Extract posterior draws for the normal (M1s) model.  The loop creates the
# same global objects as writing the extractions line by line: for k = 0..7,
# bkk_M1s holds the mcmc.list and bk_M1s the numeric draws (all chains
# concatenated by unlist()).
for (k in 0:7) {
  draws_ml <- as.mcmc.list(out_M1s2018, vars = paste0("b", k))
  assign(paste0("b", k, k, "_M1s"), draws_ml)
  assign(paste0("b", k, "_M1s"), as.numeric(unlist(draws_ml[, 1])))
}

# Residual scale (sigma) draws
sigmas_M1s <- as.mcmc.list(out_M1s2018, vars = "sigma")
s_M1s <- as.numeric(unlist(sigmas_M1s[, 1]))


# Posterior Predictive Check (PPC)
# Rebuild X1..X7: evenly spaced 74-point grids over each predictor's
# observed range (same values as before; repeated here so this section can
# run independently).
for (k in 1:7) {
  xv <- get(paste0("x", k))
  assign(paste0("X", k), seq(min(xv), max(xv), length.out = 74))
}


# Plot observed data
# Generate PPC-simulated points

nn <- length(y)  # Assuming x1 to x7 have the same length

par(mfrow = c(4, 2))  # 4 x 2 panel grid (7 of 8 cells used)

# PPC panels for the normal model, one per predictor: observed scatter, one
# simulated replicate per observation (normal noise now), 20 predictive
# regression lines, and the PVAF of each drawn line.
# FIX(review): as in the M1 loop, the original reused `i` for both the panel
# loop and the inner draws loop (shadowing) and bound `text` as a global,
# masking base::text.  Indices renamed (p, m); plotted output is unchanged.
for (p in 1:7) {
  # Predictor vector for this panel (x1, ..., x7)
  x_var <- get(paste0("x", p))

  # Observed data
  plot(x_var, y, col = "black", pch = 1, cex = 0.8,
       main = paste("Plot for x", p, sep = ""),
       xlab = "x values", ylab = "y")

  # One PPC replicate per observation, using every 10th stored draw of the
  # coefficients plus Gaussian noise rnorm(1, 0, sigma).
  y_ppc <- rep(0, nn)
  for (j in 1:nn) {
    y_ppc[j] <- b0_M1s[j*10] +
      b1_M1s[j*10] * x1[j] +
      b2_M1s[j*10] * x2[j] +
      b3_M1s[j*10] * x3[j] +
      b4_M1s[j*10] * x4[j] +
      b5_M1s[j*10] * x5[j] +
      b6_M1s[j*10] * x6[j] +
      b7_M1s[j*10] * x7[j] + rnorm(1, 0, s_M1s[j*10])
  }

  # Overlay simulated points and a legend
  points(x_var, y_ppc, col = "red", cex = 0.8, pch = 18)
  legend("topright", legend = c("Observed", "PPC-simulated"),
         pch = c(1, 18), col = c("black", "red"), cex = 0.8)

  # 20 posterior predictive regression lines (every 400th stored draw);
  # pvaf_M1s is recomputed identically in every panel and reported by the
  # standalone loop after this block.
  mm <- 20
  pvaf_M1s <- numeric(mm)
  X_var <- get(paste0("X", p))  # X1, ..., X7 grid for this panel
  for (m in 1:mm) {
    # Predictive line evaluated over the X grids
    ppd1 <- b0_M1s[m*400] + 
      b1_M1s[m*400] * X1 + 
      b2_M1s[m*400] * X2 +
      b3_M1s[m*400] * X3 +
      b4_M1s[m*400] * X4 +
      b5_M1s[m*400] * X5 +
      b6_M1s[m*400] * X6 +
      b7_M1s[m*400] * X7 

    # Fitted values at the observed predictors
    y.prd <- b0_M1s[m*400] + 
      b1_M1s[m*400] * x1 + 
      b2_M1s[m*400] * x2 +
      b3_M1s[m*400] * x3 +
      b4_M1s[m*400] * x4 +
      b5_M1s[m*400] * x5 +
      b6_M1s[m*400] * x6 +
      b7_M1s[m*400] * x7 

    # Proportion of variance accounted for by this draw
    pvaf_M1s[m] <- 1 - sum((y - y.prd)^2) / sum((y - mean(y))^2)
    lines(X_var, ppd1, lwd = 1, col = "red")
  }
}

# Reset layout to default
par(mfrow = c(1, 1))

# PVAF over 20 thinned posterior draws (every 400th stored sample; b0_M1s
# etc. hold the draws from all chains concatenated)
mm <- 20
pvaf_M1s <- rep(0, mm)
for (i in 1:mm) {
  d <- i * 400  # thinned draw index

  # Predictive line over the X grids (kept for parity with the plotting
  # loop above; not drawn here)
  ppd1 <- b0_M1s[d] +
    b1_M1s[d] * X1 + b2_M1s[d] * X2 + b3_M1s[d] * X3 +
    b4_M1s[d] * X4 + b5_M1s[d] * X5 + b6_M1s[d] * X6 +
    b7_M1s[d] * X7

  # Fitted values at the observed predictors
  y.prd <- b0_M1s[d] +
    b1_M1s[d] * x1 + b2_M1s[d] * x2 + b3_M1s[d] * x3 +
    b4_M1s[d] * x4 + b5_M1s[d] * x5 + b6_M1s[d] * x6 +
    b7_M1s[d] * x7

  pvaf_M1s[i] <- 1 - sum((y - y.prd)^2) / sum((y - mean(y))^2)
}

# Display PVAF
cat('Mean Percent Variance Accounted For (PVAF):', mean(pvaf_M1s), "\n")
## Mean Percent Variance Accounted For (PVAF): 0.3935802
# Normality check for residuals (posterior-mean fit minus observed y)
residu <- mean(b0_M1s) + mean(b1_M1s)*x1 + mean(b2_M1s)*x2 + mean(b3_M1s)*x3 + 
  mean(b4_M1s)*x4 + mean(b5_M1s)*x5 + mean(b6_M1s)*x6 + mean(b7_M1s)*x7 - y
qqnorm(residu) ; qqline(residu)

hist(residu, breaks = 11)

# M1x_2018 (Student-t model with x2:x3 interaction) --------------------------
# FIX(review): this line was the bare symbol `M1x_2018`, which is never
# defined and errors when the script is sourced; it reads as a section label.

# Define the model with no individual differences.
# Extension of M1: same Student-t likelihood and shrinkage priors, plus an
# x2:x3 interaction term with coefficient b23 (normal prior rather than
# Laplace).
# NOTE(review): as in M1, sigma ~ dgamma(0.01, 0.01) is a prior on sigma
# itself, not on the precision — confirm intended.
modelString_M1x_2018 = "
  model{
    for (i in 1:Ntotal){
      y[i] ~ dt(b0 + 
      b1*x1[i] + b2*x2[i] + b3*x3[i] + b4*x4[i] +
      b5*x5[i] + b6*x6[i] + b7*x7[i] + b23*x2[i]*x3[i], 1/sigma^2, nu)
    }
      
    # priors vague 
    b0 ~ dnorm(0, 1/2^2)
    b1 ~ ddexp(0.0, sqrt(2.0))
    b2 ~ ddexp(0.0, sqrt(2.0))
    b3 ~ ddexp(0.0, sqrt(2.0))
    b4 ~ ddexp(0.0, sqrt(2.0))
    b5 ~ ddexp(0.0, sqrt(2.0))
    b6 ~ ddexp(0.0, sqrt(2.0))
    b7 ~ ddexp(0.0, sqrt(2.0))
    b23 ~ dnorm(0, 1/2^2)
    # Sigma
    sigma ~ dgamma(0.01, 0.01)
    nu ~ dexp(0.0333)
    }
"
# MCMC run
# FIX(review): model M1x samples the Student-t dof `nu`, but the original
# inits omitted it (unlike myinits_M12018), leaving nu's starting value to
# its long-tailed dexp prior.  Initialize nu explicitly in every chain for
# consistency with the M1 setup.
myinits_M1x_2018 <- list (list(b0 = runif(1), b1 = runif(1), b2 = runif(1), b3 = runif(1),
                                b4 = runif(1), b5 = runif(1), b6 = runif(1), b7 = runif(1), b23 = runif(1),
                                sigma = runif(1), nu = runif(1)),
                           list(b0 = runif(1), b1 = runif(1), b2 = runif(1), b3 = runif(1),
                                b4 = runif(1), b5 = runif(1), b6 = runif(1), b7 = runif(1), b23 = runif(1),
                                sigma = runif(1), nu = runif(1)),
                           list(b0 = runif(1), b1 = runif(1), b2 = runif(1), b3 = runif(1),
                                b4 = runif(1), b5 = runif(1), b6 = runif(1), b7 = runif(1), b23 = runif(1),
                                sigma = runif(1), nu = runif(1)))

# Fit the interaction model with the same sampling schedule as M1; monitor
# all coefficients (including b23), sigma, nu, and the DIC/pD diagnostics.
out_M1x2018 <- run.jags (model=modelString_M1x_2018, data=dataList, inits=myinits_M1x_2018,
                        n.chains=3,
                        adapt=500, burnin=500, sample=5000, monitor=c("b0","b1", "b2", "b3", "b4", 
                                                                      "b5","b6", "b7", "b23", "sigma", "nu", "pD", "DIC"))
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## basemod: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 3 is invalid
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## bugs: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 4 is invalid
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## basemod: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 3 is invalid
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## bugs: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 4 is invalid
## Compiling rjags model...
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## basemod: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 3 is invalid
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## bugs: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 4 is invalid
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## basemod: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 3 is invalid
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## bugs: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 4 is invalid
## Calling the simulation using the rjags method...
## Adapting the model for 500 iterations...
## Burning in the model for 500 iterations...
## Running the model for 5000 iterations...
## Extending 5000 iterations for pD/DIC estimates...
## Simulation complete
## Calculating summary statistics...
## Calculating the Gelman-Rubin statistic for 11 variables....
## Finished running the simulation
# Posterior summary for M1x: medians, 95% HPD intervals, SSeff/psrf, DIC/pD.
print(out_M1x2018) #  pD = 9.20454,DIC = -68.63088
## 
## JAGS model summary statistics from 15000 samples (chains = 3; adapt+burnin = 1000):
##                                                                         
##         Lower95     Median   Upper95       Mean       SD Mode      MCerr
## b0      0.45602    0.49277   0.53273     0.4926 0.019543   -- 0.00027101
## b1    -0.019753   0.014633  0.049104    0.01471 0.017736   -- 0.00023028
## b2      -0.2279   -0.11375 0.0020368   -0.11371 0.058852   --  0.0027692
## b3      0.12941     0.2293   0.33057    0.22968 0.050938   --   0.002086
## b4    -0.085624  -0.037116  0.011272  -0.037346 0.024622   -- 0.00056632
## b5    -0.070904  -0.023432   0.02333  -0.023347 0.024138   -- 0.00054174
## b6    -0.031113  0.0051452  0.043396  0.0050554 0.019105   -- 0.00032052
## b7    -0.063121  0.0039513  0.072762  0.0034458 0.034527   --  0.0010515
## b23   -0.025782 -0.0056246  0.015797 -0.0055133 0.010567   -- 0.00016569
## sigma   0.11169    0.13794   0.16561    0.13881 0.013684   -- 0.00018448
## nu       3.1487      31.51    104.26       40.6   32.239   --    0.56304
##                                      
##       MC%ofSD SSeff     AC.10    psrf
## b0        1.4  5200 0.0045111  1.0003
## b1        1.3  5932  0.024223       1
## b2        4.7   452   0.56258  1.0025
## b3        4.1   596   0.44962  1.0034
## b4        2.3  1890   0.12794  1.0013
## b5        2.2  1985   0.12612       1
## b6        1.7  3553  0.071338  1.0003
## b7          3  1078   0.27201  1.0005
## b23       1.6  4067 0.0033747 0.99997
## sigma     1.3  5502    0.0173  1.0003
## nu        1.7  3279  0.012102 0.99998
## 
## Model fit assessment:
## DIC = -67.10325
## [PED not available from the stored object]
## Estimated effective number of parameters:  pD = 10.11844
## 
## Total time taken: 52 seconds
# Default runjags trace/density plots for convergence inspection of M1x.
plot(out_M1x2018) 
## Generating plots...

## PVAF --------------------------
# Flatten chain 1 of each monitored M1x parameter into a plain numeric vector;
# the *_M1x vectors are indexed by (thinned) draw number in the PPC code below.
b00_M1x <- as.mcmc.list(out_M1x2018, vars = "b0")
b0_M1x <- as.numeric(unlist(b00_M1x[, 1]))
b11_M1x <- as.mcmc.list(out_M1x2018, vars = "b1")
b1_M1x <- as.numeric(unlist(b11_M1x[, 1]))
b22_M1x <- as.mcmc.list(out_M1x2018, vars = "b2")
b2_M1x <- as.numeric(unlist(b22_M1x[, 1]))
b33_M1x <- as.mcmc.list(out_M1x2018, vars = "b3")
b3_M1x <- as.numeric(unlist(b33_M1x[, 1]))
b44_M1x <- as.mcmc.list(out_M1x2018, vars = "b4")
b4_M1x <- as.numeric(unlist(b44_M1x[, 1]))
b55_M1x <- as.mcmc.list(out_M1x2018, vars = "b5")
b5_M1x <- as.numeric(unlist(b55_M1x[, 1]))
b66_M1x <- as.mcmc.list(out_M1x2018, vars = "b6")
b6_M1x <- as.numeric(unlist(b66_M1x[, 1]))
b77_M1x <- as.mcmc.list(out_M1x2018, vars = "b7")
b7_M1x <- as.numeric(unlist(b77_M1x[, 1]))
b233_M1x <- as.mcmc.list(out_M1x2018, vars = "b23")
b23_M1x <- as.numeric(unlist(b233_M1x[, 1]))

# Residual-scale (sigma) and t degrees-of-freedom (nu) draws for PPC noise.
sigmas_M1x <- as.mcmc.list(out_M1x2018, vars = "sigma")
s_M1x <- as.numeric(unlist(sigmas_M1x[, 1]))
nus_M1x <-  as.mcmc.list(out_M1x2018, vars = "nu")
nu_M1x <- as.numeric(unlist(nus_M1x[, 1]))

# Posterior Predictive Check (PPC)
# Build X1..X7: evenly spaced grids (length 74, matching the sample size)
# spanning each predictor's observed range, used for posterior-predictive lines.
for (k in seq_len(7)) {
  xk <- get(paste0("x", k))
  assign(paste0("X", k), seq(min(xk), max(xk), length.out = 74))
}

# Plot observed data / generate PPC-simulated points
nn <- length(y)  # number of observations; x1..x7 and y share this length

# PPC panels for M1x: one scatter per predictor, overlaying observed y (black)
# with one posterior-predictive draw per observation (red) and 20 posterior
# regression lines.
par(mfrow = c(4, 2))  # 4 rows x 2 columns of subplots

for (i in 1:7) {
  # Get x1, x2, ..., x7 dynamically
  x_var <- get(paste0("x", i))

  # Scatter plot of the observed data
  plot(x_var, y, col = "black", pch = 1, cex = 0.8,
       main = paste("Plot for x", i, sep = ""),
       xlab = "x values", ylab = "y")

  # One PPC-simulated y per observation, thinning the chain by 10; noise is
  # sigma-scaled t with df nu, matching the dt likelihood of model M1x.
  y_ppc <- rep(0, nn)
  for (j in 1:nn) {
    y_ppc[j] <- b0_M1x[j*10] +
      b1_M1x[j*10] * x1[j] +
      b2_M1x[j*10] * x2[j] +
      b3_M1x[j*10] * x3[j] +
      b4_M1x[j*10] * x4[j] +
      b5_M1x[j*10] * x5[j] +
      b6_M1x[j*10] * x6[j] +
      b7_M1x[j*10] * x7[j] + b23_M1x[j*10]*x2[j]*x3[j] + s_M1x[j*10]*rt(1, nu_M1x[j*10])
  }

  # Add PPC points and a legend.
  # FIX: legend vectors are now passed inline -- the original assigned globals
  # named 'text'/'pchx', masking graphics::text().
  points(x_var, y_ppc, col = "red", cex = 0.8, pch = 18)
  legend("topright", legend = c("Observed", "PPC-simulated"),
         pch = c(1, 18), col = c("black", "red"), cex = 0.8)

  # Posterior predictive regression lines: 20 draws thinned by 400.
  # FIX: the inner loop originally reused index 'i', clobbering the outer
  # loop variable; renamed to 'm'.
  mm <- 20
  pvaf_M1 <- numeric(mm)
  X_var <- get(paste0("X", i))
  for (m in 1:mm) {
    # Predictive line over the X grids (all predictors varied jointly)
    ppd1 <- b0_M1x[m*400] +
      b1_M1x[m*400] * X1 +
      b2_M1x[m*400] * X2 +
      b3_M1x[m*400] * X3 +
      b4_M1x[m*400] * X4 +
      b5_M1x[m*400] * X5 +
      b6_M1x[m*400] * X6 +
      b7_M1x[m*400] * X7 + b23_M1x[m*400]*X2*X3

    # Fitted values at the observed points for this draw
    y.prd <- b0_M1x[m*400] +
      b1_M1x[m*400] * x1 +
      b2_M1x[m*400] * x2 +
      b3_M1x[m*400] * x3 +
      b4_M1x[m*400] * x4 +
      b5_M1x[m*400] * x5 +
      b6_M1x[m*400] * x6 +
      b7_M1x[m*400] * x7 + b23_M1x[m*400]*x2*x3

    # PVAF for this draw (recomputed each panel; last panel's values persist)
    pvaf_M1[m] <- 1 - sum((y - y.prd)^2) / sum((y - mean(y))^2)
    lines(X_var, ppd1, lwd = 1, col = "red")
  }
}

# Reset layout to default
par(mfrow = c(1, 1))

# PVAF over 20 thinned posterior draws: 1 - SSE/SST with each draw's
# coefficients. (The original also computed an unused grid prediction 'ppd1'
# here; dropped as dead code -- no plotting happens in this block.)
mm <- 20
pvaf_M1x <- rep(0, mm)
for (i in 1:mm) {
  # Fitted values at the observed data for draw i*400
  y.prd <- b0_M1x[i*400] +
    b1_M1x[i*400] * x1 +
    b2_M1x[i*400] * x2 +
    b3_M1x[i*400] * x3 +
    b4_M1x[i*400] * x4 +
    b5_M1x[i*400] * x5 +
    b6_M1x[i*400] * x6 +
    b7_M1x[i*400] * x7 + b23_M1x[i*400]*x2*x3

  pvaf_M1x[i] <- 1 - sum((y - y.prd)^2) / sum((y - mean(y))^2)
}

# Display PVAF
cat('Mean Percent Variance Accounted For (PVAF):', mean(pvaf_M1x), "\n")

# Residuals at posterior-mean coefficients (note: computed as fitted - y,
# the negative of the usual convention) for a rough normality check.
residu <- mean(b0_M1x) + mean(b1_M1x)*x1 + mean(b2_M1x)*x2 + mean(b3_M1x)*x3 + 
  mean(b4_M1x)*x4 + mean(b5_M1x)*x5 + mean(b6_M1x)*x6 + mean(b7_M1x)*x7 + mean(b23_M1x)*x2*x3 - y
qqnorm(residu) ; qqline(residu)

hist(residu, breaks = 11)

# M1xn_2018 ----
# Same linear predictor as M1x (incl. the x2:x3 interaction) but with a normal
# likelihood: no heavy-tailed t, no nu parameter.
# NOTE(review): the original had a bare `M1xn_2018` expression here, which
# errors unless such an object exists; kept as a section marker instead.

# Define the model with no individual differences
modelString_M1xn_2018 <- "
  model{
    for (i in 1:Ntotal){
      y[i] ~ dnorm(b0 + 
      b1*x1[i] + b2*x2[i] + b3*x3[i] + b4*x4[i] +
      b5*x5[i] + b6*x6[i] + b7*x7[i] + b23*x2[i]*x3[i], 1/sigma^2)
    }
      
    # priors vague 
    b0 ~ dnorm(0, 1/2^2)
    b1 ~ ddexp(0.0, sqrt(2.0))
    b2 ~ ddexp(0.0, sqrt(2.0))
    b3 ~ ddexp(0.0, sqrt(2.0))
    b4 ~ ddexp(0.0, sqrt(2.0))
    b5 ~ ddexp(0.0, sqrt(2.0))
    b6 ~ ddexp(0.0, sqrt(2.0))
    b7 ~ ddexp(0.0, sqrt(2.0))
    b23 ~ dnorm(0, 1/2^2)
    # Sigma
    sigma ~ dgamma(0.01, 0.01)
      
    }
"
# MCMC run
# Three chains, each with independently drawn U(0,1) starting values for all
# parameters (including the interaction coefficient b23).
myinits_M1xn_2018 <- list (list(b0 = runif(1), b1 = runif(1), b2 = runif(1), b3 = runif(1),
                              b4 = runif(1), b5 = runif(1), b6 = runif(1), b7 = runif(1), b23 = runif(1),
                              sigma = runif(1)),
                         list(b0 = runif(1), b1 = runif(1), b2 = runif(1), b3 = runif(1),
                              b4 = runif(1), b5 = runif(1), b6 = runif(1), b7 = runif(1), b23 = runif(1),
                              sigma = runif(1)),
                         list(b0 = runif(1), b1 = runif(1), b2 = runif(1), b3 = runif(1),
                              b4 = runif(1), b5 = runif(1), b6 = runif(1), b7 = runif(1), b23 = runif(1),
                              sigma = runif(1)))

# Fit M1xn (normal likelihood, interaction retained); monitors the
# coefficients, sigma, and the pD/DIC diagnostics (no nu here).
out_M1xn2018 <- run.jags (model=modelString_M1xn_2018, data=dataList, inits=myinits_M1xn_2018,
                         n.chains=3,
                         adapt=500, burnin=500, sample=5000, monitor=c("b0","b1", "b2", "b3", "b4", 
                                                                       "b5","b6", "b7", "b23", "sigma", "pD", "DIC"))
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## basemod: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 3 is invalid
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## bugs: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 4 is invalid
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## basemod: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 3 is invalid
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## bugs: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 4 is invalid
## Compiling rjags model...
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## basemod: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 3 is invalid
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## bugs: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 4 is invalid
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## basemod: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 3 is invalid
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## bugs: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 4 is invalid
## Calling the simulation using the rjags method...
## Adapting the model for 500 iterations...
## Burning in the model for 500 iterations...
## Running the model for 5000 iterations...
## Extending 5000 iterations for pD/DIC estimates...
## Simulation complete
## Calculating summary statistics...
## Calculating the Gelman-Rubin statistic for 10 variables....
## Finished running the simulation
# Posterior summary for M1xn (normal-likelihood variant).
print(out_M1xn2018) # pD = 9.3521, -68.97304
## 
## JAGS model summary statistics from 15000 samples (chains = 3; adapt+burnin = 1000):
##                                                                         
##         Lower95     Median   Upper95       Mean       SD Mode      MCerr
## b0      0.45642    0.49348   0.53216     0.4936 0.019127   -- 0.00020593
## b1    -0.019195   0.014673  0.050601   0.014619   0.0178   -- 0.00021502
## b2     -0.23319   -0.11192 -0.005204   -0.11317 0.058341   --  0.0026165
## b3      0.12688    0.23017   0.32409    0.23087 0.051217   --  0.0020511
## b4    -0.088368   -0.03972 0.0086918   -0.03975 0.024829   -- 0.00053885
## b5    -0.070137  -0.023556  0.023509  -0.023503 0.023953   -- 0.00053265
## b6    -0.034468  0.0046383  0.041691    0.00474 0.019315   -- 0.00031508
## b7    -0.067854  0.0018569  0.070872  0.0022316 0.034996   --   0.001045
## b23   -0.025516 -0.0058985  0.014996 -0.0057662 0.010262   -- 0.00012326
## sigma   0.11994    0.14265   0.16919    0.14349 0.012734   -- 0.00015476
##                                       
##       MC%ofSD SSeff      AC.10    psrf
## b0        1.1  8627 -0.0074741  1.0004
## b1        1.2  6853  0.0027727  1.0013
## b2        4.5   497    0.50907  1.0112
## b3          4   624     0.4413  1.0056
## b4        2.2  2123    0.10614  1.0014
## b5        2.2  2022    0.12886  1.0063
## b6        1.6  3758   0.051262  1.0004
## b7          3  1122    0.24008  1.0053
## b23       1.2  6932 0.00059861   1.001
## sigma     1.2  6771  0.0042454 0.99998
## 
## Model fit assessment:
## DIC = -67.60378
## [PED not available from the stored object]
## Estimated effective number of parameters:  pD = 10.17908
## 
## Total time taken: 8.7 seconds
# Default runjags trace/density plots for convergence inspection of M1xn.
plot(out_M1xn2018) 
## Generating plots...

## PVAF --------------------------
# Flatten chain 1 of each monitored M1xn parameter into a numeric vector,
# indexed by (thinned) draw number in the PPC code below.
b00_M1xn <- as.mcmc.list(out_M1xn2018, vars = "b0")
b0_M1xn <- as.numeric(unlist(b00_M1xn[, 1]))
b11_M1xn <- as.mcmc.list(out_M1xn2018, vars = "b1")
b1_M1xn <- as.numeric(unlist(b11_M1xn[, 1]))
b22_M1xn <- as.mcmc.list(out_M1xn2018, vars = "b2")
b2_M1xn <- as.numeric(unlist(b22_M1xn[, 1]))
b33_M1xn <- as.mcmc.list(out_M1xn2018, vars = "b3")
b3_M1xn <- as.numeric(unlist(b33_M1xn[, 1]))
b44_M1xn <- as.mcmc.list(out_M1xn2018, vars = "b4")
b4_M1xn <- as.numeric(unlist(b44_M1xn[, 1]))
b55_M1xn <- as.mcmc.list(out_M1xn2018, vars = "b5")
b5_M1xn <- as.numeric(unlist(b55_M1xn[, 1]))
# CONSISTENCY FIX: this intermediate was misspelled 'b66_M1xm' in the original.
b66_M1xn <- as.mcmc.list(out_M1xn2018, vars = "b6")
b6_M1xn <- as.numeric(unlist(b66_M1xn[, 1]))
b77_M1xn <- as.mcmc.list(out_M1xn2018, vars = "b7")
b7_M1xn <- as.numeric(unlist(b77_M1xn[, 1]))
b233_M1xn <- as.mcmc.list(out_M1xn2018, vars = "b23")
b23_M1xn <- as.numeric(unlist(b233_M1xn[, 1]))

# Residual-scale (sigma) draws; normal likelihood, so no nu for this model.
sigmas_M1xn <- as.mcmc.list(out_M1xn2018, vars = "sigma")
s_M1xn <- as.numeric(unlist(sigmas_M1xn[, 1]))


# Posterior Predictive Check (PPC)
# Build X1..X7: evenly spaced grids (length 74, matching the sample size)
# spanning each predictor's observed range, used for posterior-predictive lines.
for (k in seq_len(7)) {
  xk <- get(paste0("x", k))
  assign(paste0("X", k), seq(min(xk), max(xk), length.out = 74))
}

# Plot observed data / generate PPC-simulated points
nn <- length(y)  # number of observations; x1..x7 and y share this length

# PPC panels for M1xn (normal likelihood, interaction retained).
# BUG FIX: the simulated noise originally used the undefined object 's_M1s';
# the sigma draws for this model are 's_M1xn'. Also fixed: a stray '+ +' in
# the grid prediction, and the inner loop reusing the outer index 'i'.
par(mfrow = c(4, 2))  # 4 rows x 2 columns of subplots

for (i in 1:7) {
  # Get x1, x2, ..., x7 dynamically
  x_var <- get(paste0("x", i))

  # Scatter plot of the observed data
  plot(x_var, y, col = "black", pch = 1, cex = 0.8,
       main = paste("Plot for x", i, sep = ""),
       xlab = "x values", ylab = "y")

  # One PPC-simulated y per observation (thin by 10); normal noise with the
  # draw's sigma, matching the dnorm likelihood of M1xn.
  y_ppc <- rep(0, nn)
  for (j in 1:nn) {
    y_ppc[j] <- b0_M1xn[j*10] +
      b1_M1xn[j*10] * x1[j] +
      b2_M1xn[j*10] * x2[j] +
      b3_M1xn[j*10] * x3[j] +
      b4_M1xn[j*10] * x4[j] +
      b5_M1xn[j*10] * x5[j] +
      b6_M1xn[j*10] * x6[j] +
      b7_M1xn[j*10] * x7[j] + b23_M1xn[j*10]*x2[j]*x3[j] + rnorm(1, 0, s_M1xn[j*10])
  }

  # Add PPC points and a legend (vectors inlined; the original assigned
  # globals named 'text'/'pchx', masking graphics::text()).
  points(x_var, y_ppc, col = "red", cex = 0.8, pch = 18)
  legend("topright", legend = c("Observed", "PPC-simulated"),
         pch = c(1, 18), col = c("black", "red"), cex = 0.8)

  # Posterior predictive regression lines: 20 draws thinned by 400.
  mm <- 20
  pvaf_M1s <- numeric(mm)
  X_var <- get(paste0("X", i))
  for (m in 1:mm) {
    # Predictive line over the X grids
    ppd1 <- b0_M1xn[m*400] +
      b1_M1xn[m*400] * X1 +
      b2_M1xn[m*400] * X2 +
      b3_M1xn[m*400] * X3 +
      b4_M1xn[m*400] * X4 +
      b5_M1xn[m*400] * X5 +
      b6_M1xn[m*400] * X6 +
      b7_M1xn[m*400] * X7 + b23_M1xn[m*400]*X2*X3

    # Fitted values at the observed points for this draw
    y.prd <- b0_M1xn[m*400] +
      b1_M1xn[m*400] * x1 +
      b2_M1xn[m*400] * x2 +
      b3_M1xn[m*400] * x3 +
      b4_M1xn[m*400] * x4 +
      b5_M1xn[m*400] * x5 +
      b6_M1xn[m*400] * x6 +
      b7_M1xn[m*400] * x7 + b23_M1xn[m*400]*x2*x3

    # PVAF for this draw (recomputed each panel; last panel's values persist)
    pvaf_M1s[m] <- 1 - sum((y - y.prd)^2) / sum((y - mean(y))^2)
    lines(X_var, ppd1, lwd = 1, col = "red")
  }
}

# Reset layout to default
par(mfrow = c(1, 1))

# PVAF over 20 thinned posterior draws for M1xn. (The unused grid prediction
# 'ppd1' computed here in the original was dead code and has been dropped --
# no plotting happens in this block.)
mm <- 20
pvaf_M1s <- rep(0, mm)
for (i in 1:mm) {
  # Fitted values at the observed data for draw i*400
  y.prd <- b0_M1xn[i*400] +
    b1_M1xn[i*400] * x1 +
    b2_M1xn[i*400] * x2 +
    b3_M1xn[i*400] * x3 +
    b4_M1xn[i*400] * x4 +
    b5_M1xn[i*400] * x5 +
    b6_M1xn[i*400] * x6 +
    b7_M1xn[i*400] * x7 + b23_M1xn[i*400]*x2*x3

  pvaf_M1s[i] <- 1 - sum((y - y.prd)^2) / sum((y - mean(y))^2)
}

# Display PVAF
cat('Mean Percent Variance Accounted For (PVAF):', mean(pvaf_M1s), "\n")

# Residuals at posterior-mean coefficients (fitted - y) for a normality check.
residu <- mean(b0_M1xn) + mean(b1_M1xn)*x1 + mean(b2_M1xn)*x2 + mean(b3_M1xn)*x3 + 
  mean(b4_M1xn)*x4 + mean(b5_M1xn)*x5 + mean(b6_M1xn)*x6 + mean(b7_M1xn)*x7 + mean(b23_M1xn)*x2*x3 - y
qqnorm(residu) ; qqline(residu)

hist(residu, breaks = 11)

# M1h_2018 ----
# Hierarchical-shrinkage variant: t likelihood with df nu; Cauchy priors
# (t with df 1) on the slopes sharing a common scale sigmaBeta; no interaction.
# NOTE(review): the original had a bare `M1h_2018` expression here, which
# errors unless such an object exists; kept as a section marker instead.

modelString_M1h_2018 <- "
model{
  for (i in 1:Ntotal){
      y[i] ~ dt(b0 + 
      b1*x1[i] + b2*x2[i] + b3*x3[i] + b4*x4[i] +
      b5*x5[i] + b6*x6[i] + b7*x7[i], 1/sigma^2, nu)
    }
      
  # priors vague 
  b0 ~ dnorm(0, 1/2^2)
  b1 ~ dt(0.0, 1/sigmaBeta^2, 1)
  b2 ~ dt(0.0, 1/sigmaBeta^2, 1)
  b3 ~ dt(0.0, 1/sigmaBeta^2, 1)
  b4 ~ dt(0.0, 1/sigmaBeta^2, 1)
  b5 ~ dt(0.0, 1/sigmaBeta^2, 1)
  b6 ~ dt(0.0, 1/sigmaBeta^2, 1)
  b7 ~ dt(0.0, 1/sigmaBeta^2, 1)
  
  # Sigma
  sigma ~ dgamma(0.01, 0.01)
  nu ~ dexp(0.0333)
  sigmaBeta ~ dgamma(2.618, 1.618) 
}
"

# MCMC run
# Three chains with independent U(0,1) starting values for every parameter,
# including the hierarchical slope scale sigmaBeta.
myinits_M1h2018 <- list (list(b0 = runif(1), b1 = runif(1), b2 = runif(1), b3 = runif(1),
                             b4 = runif(1), b5 = runif(1), b6 = runif(1), b7 = runif(1),
                             sigma = runif(1), nu = runif(1), sigmaBeta = runif(1)),
                        list(b0 = runif(1), b1 = runif(1), b2 = runif(1), b3 = runif(1),
                             b4 = runif(1), b5 = runif(1), b6 = runif(1), b7 = runif(1),
                             sigma = runif(1), nu = runif(1), sigmaBeta = runif(1)),
                        list(b0 = runif(1), b1 = runif(1), b2 = runif(1), b3 = runif(1),
                             b4 = runif(1), b5 = runif(1), b6 = runif(1), b7 = runif(1),
                             sigma = runif(1), nu = runif(1), sigmaBeta = runif(1)))

# Fit M1h (hierarchical shrinkage, t likelihood); monitors the coefficients,
# sigma, nu, sigmaBeta and the pD/DIC diagnostics.
out_M1h2018 <- run.jags(model=modelString_M1h_2018, data=dataList, inits=myinits_M1h2018,
                        n.chains=3,
                        adapt=500, burnin=500, sample=5000, monitor=c("b0","b1", "b2", "b3", "b4", 
                                                                      "b5","b6", "b7", "sigma", "nu", "sigmaBeta","pD", "DIC"))
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## basemod: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 3 is invalid
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## bugs: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 4 is invalid
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## basemod: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 3 is invalid
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## bugs: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 4 is invalid
## Compiling rjags model...
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## basemod: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 3 is invalid
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## bugs: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 4 is invalid
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## basemod: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 3 is invalid
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## bugs: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 4 is invalid
## Calling the simulation using the rjags method...
## Adapting the model for 500 iterations...
## Burning in the model for 500 iterations...
## Running the model for 5000 iterations...
## Extending 5000 iterations for pD/DIC estimates...
## Simulation complete
## Calculating summary statistics...
## Calculating the Gelman-Rubin statistic for 11 variables....
## Finished running the simulation
# Posterior summary for M1h (hierarchical-shrinkage variant).
print(out_M1h2018) #  pD = 9.20454,DIC = -68.63088
## 
## JAGS model summary statistics from 15000 samples (chains = 3; adapt+burnin = 1000):
##                                                                            
##             Lower95     Median  Upper95       Mean       SD Mode      MCerr
## b0          0.45423    0.48727  0.51883    0.48718 0.016589   -- 0.00016864
## b1        -0.017728   0.011969 0.044627   0.012528 0.015849   -- 0.00020102
## b2         -0.16242  -0.043321 0.035446  -0.051484 0.052644   --  0.0024762
## b3         0.070196    0.16217   0.2755    0.16556 0.052902   --  0.0024497
## b4         -0.07208  -0.025694 0.012263  -0.026971    0.022   -- 0.00042882
## b5         -0.05733  -0.015633 0.021245  -0.016891 0.019967   --    0.00038
## b6        -0.022996  0.0079006 0.042706  0.0084637 0.016575   -- 0.00022045
## b7        -0.055996 -0.0020457 0.049343 -0.0019256  0.02566   -- 0.00060179
## sigma       0.11278    0.13776  0.16554    0.13822 0.013477   -- 0.00017499
## nu            2.391     29.142   97.439     37.717    30.14   --    0.53945
## sigmaBeta 0.0047597   0.046573  0.15111   0.059769 0.048911   --  0.0013391
##                                             
##           MC%ofSD SSeff        AC.10    psrf
## b0              1  9677 -0.000018769  1.0001
## b1            1.3  6216      0.01753  1.0004
## b2            4.7   452      0.53569  1.0029
## b3            4.6   466      0.52184  1.0023
## b4            1.9  2632     0.050125  1.0007
## b5            1.9  2761      0.05922  1.0006
## b6            1.3  5653     0.029723  1.0003
## b7            2.3  1818       0.1015  1.0005
## sigma         1.3  5931   0.00095998 0.99999
## nu            1.8  3122      0.01845       1
## sigmaBeta     2.7  1334      0.17529  1.0015
## 
## Model fit assessment:
## DIC = -68.62247
## [PED not available from the stored object]
## Estimated effective number of parameters:  pD = 8.31048
## 
## Total time taken: 51.6 seconds
# Default runjags trace/density plots for convergence inspection of M1h.
plot(out_M1h2018) 
## Generating plots...

## PVAF --------------------------
# Flatten chain 1 of each monitored M1h parameter into a plain numeric vector;
# the *_M1h vectors are indexed by (thinned) draw number in the PPC code below.
b00_M1h <- as.mcmc.list(out_M1h2018, vars = "b0")
b0_M1h <- as.numeric(unlist(b00_M1h[, 1]))
b11_M1h <- as.mcmc.list(out_M1h2018, vars = "b1")
b1_M1h <- as.numeric(unlist(b11_M1h[, 1]))
b22_M1h <- as.mcmc.list(out_M1h2018, vars = "b2")
b2_M1h <- as.numeric(unlist(b22_M1h[, 1]))
b33_M1h <- as.mcmc.list(out_M1h2018, vars = "b3")
b3_M1h <- as.numeric(unlist(b33_M1h[, 1]))
b44_M1h <- as.mcmc.list(out_M1h2018, vars = "b4")
b4_M1h <- as.numeric(unlist(b44_M1h[, 1]))
b55_M1h <- as.mcmc.list(out_M1h2018, vars = "b5")
b5_M1h <- as.numeric(unlist(b55_M1h[, 1]))
b66_M1h <- as.mcmc.list(out_M1h2018, vars = "b6")
b6_M1h <- as.numeric(unlist(b66_M1h[, 1]))
b77_M1h <- as.mcmc.list(out_M1h2018, vars = "b7")
b7_M1h <- as.numeric(unlist(b77_M1h[, 1]))

# Residual-scale (sigma) and t degrees-of-freedom (nu) draws for PPC noise.
sigmas_M1h <- as.mcmc.list(out_M1h2018, vars = "sigma")
s_M1h <- as.numeric(unlist(sigmas_M1h[, 1]))
nus_M1h <-  as.mcmc.list(out_M1h2018, vars = "nu")
nu_M1h <- as.numeric(unlist(nus_M1h[, 1]))

# Posterior Predictive Check (PPC)
# Build X1..X7: evenly spaced grids (length 74, matching the sample size)
# spanning each predictor's observed range, used for posterior-predictive lines.
for (k in seq_len(7)) {
  xk <- get(paste0("x", k))
  assign(paste0("X", k), seq(min(xk), max(xk), length.out = 74))
}

# Plot observed data / generate PPC-simulated points
nn <- length(y)  # number of observations; x1..x7 and y share this length

# PPC panels for M1h (t likelihood, Cauchy shrinkage priors, no interaction).
# FIX: the inner loop originally reused index 'i', clobbering the outer loop
# variable; renamed to 'm'. Legend vectors inlined (the original assigned
# globals named 'text'/'pchx', masking graphics::text()).
par(mfrow = c(4, 2))  # 4 rows x 2 columns of subplots

for (i in 1:7) {
  # Get x1, x2, ..., x7 dynamically
  x_var <- get(paste0("x", i))

  # Scatter plot of the observed data
  plot(x_var, y, col = "black", pch = 1, cex = 0.8,
       main = paste("Plot for x", i, sep = ""),
       xlab = "x values", ylab = "y")

  # One PPC-simulated y per observation (thin by 10); sigma-scaled t noise
  # with df nu, matching the dt likelihood of M1h.
  y_ppc <- rep(0, nn)
  for (j in 1:nn) {
    y_ppc[j] <- b0_M1h[j*10] +
      b1_M1h[j*10] * x1[j] +
      b2_M1h[j*10] * x2[j] +
      b3_M1h[j*10] * x3[j] +
      b4_M1h[j*10] * x4[j] +
      b5_M1h[j*10] * x5[j] +
      b6_M1h[j*10] * x6[j] +
      b7_M1h[j*10] * x7[j] + s_M1h[j*10]*rt(1, nu_M1h[j*10])
  }

  # Add PPC points and a legend
  points(x_var, y_ppc, col = "red", cex = 0.8, pch = 18)
  legend("topright", legend = c("Observed", "PPC-simulated"),
         pch = c(1, 18), col = c("black", "red"), cex = 0.8)

  # Posterior predictive regression lines: 20 draws thinned by 400.
  mm <- 20
  pvaf_M1h <- numeric(mm)
  X_var <- get(paste0("X", i))
  for (m in 1:mm) {
    # Predictive line over the X grids
    ppd1 <- b0_M1h[m*400] +
      b1_M1h[m*400] * X1 +
      b2_M1h[m*400] * X2 +
      b3_M1h[m*400] * X3 +
      b4_M1h[m*400] * X4 +
      b5_M1h[m*400] * X5 +
      b6_M1h[m*400] * X6 +
      b7_M1h[m*400] * X7

    # Fitted values at the observed points for this draw
    y.prd <- b0_M1h[m*400] +
      b1_M1h[m*400] * x1 +
      b2_M1h[m*400] * x2 +
      b3_M1h[m*400] * x3 +
      b4_M1h[m*400] * x4 +
      b5_M1h[m*400] * x5 +
      b6_M1h[m*400] * x6 +
      b7_M1h[m*400] * x7

    # PVAF for this draw (recomputed each panel; last panel's values persist)
    pvaf_M1h[m] <- 1 - sum((y - y.prd)^2) / sum((y - mean(y))^2)
    lines(X_var, ppd1, lwd = 1, col = "red")
  }
}

# Reset layout to default
par(mfrow = c(1, 1))

# PVAF over 20 thinned posterior draws for M1h.
# BUG FIX: the loop originally stored into 'pvaf_M1' while reporting
# mean(pvaf_M1h), so the printed PVAF was always 0 (as seen in the knitted
# output); store into 'pvaf_M1h'. (The unused grid prediction 'ppd1' was
# dead code and has been dropped.)
mm <- 20
pvaf_M1h <- rep(0, mm)
for (i in 1:mm) {
  # Fitted values at the observed data for draw i*400
  y.prd <- b0_M1h[i*400] +
    b1_M1h[i*400] * x1 +
    b2_M1h[i*400] * x2 +
    b3_M1h[i*400] * x3 +
    b4_M1h[i*400] * x4 +
    b5_M1h[i*400] * x5 +
    b6_M1h[i*400] * x6 +
    b7_M1h[i*400] * x7

  pvaf_M1h[i] <- 1 - sum((y - y.prd)^2) / sum((y - mean(y))^2)
}

# Display PVAF
cat('Mean Percent Variance Accounted For (PVAF):', mean(pvaf_M1h), "\n")

# Residuals at posterior-mean coefficients (fitted - y) for a normality check.
residu <- mean(b0_M1h) + mean(b1_M1h)*x1 + mean(b2_M1h)*x2 + mean(b3_M1h)*x3 + 
  mean(b4_M1h)*x4 + mean(b5_M1h)*x5 + mean(b6_M1h)*x6 + mean(b7_M1h)*x7 - y
qqnorm(residu) ; qqline(residu)

hist(residu, breaks = 11)

# M1nh_2018 ----
# Normal-likelihood variant of the hierarchical model: normal priors on the
# slopes sharing a common scale sigmaBeta; no nu, no interaction.
# NOTE(review): the original had a bare `M1nh_2018` expression here, which
# errors unless such an object exists; kept as a section marker instead.

modelString_M1nh_2018 <- "
model{
  for (i in 1:Ntotal){
      y[i] ~ dnorm(b0 + 
      b1*x1[i] + b2*x2[i] + b3*x3[i] + b4*x4[i] +
      b5*x5[i] + b6*x6[i] + b7*x7[i], 1/sigma^2)
    }
      
  # priors vague 
  b0 ~ dnorm(0, 1/2^2)
  b1 ~ dnorm(0.0, 1/sigmaBeta^2)
  b2 ~ dnorm(0.0, 1/sigmaBeta^2)
  b3 ~ dnorm(0.0, 1/sigmaBeta^2)
  b4 ~ dnorm(0.0, 1/sigmaBeta^2)
  b5 ~ dnorm(0.0, 1/sigmaBeta^2)
  b6 ~ dnorm(0.0, 1/sigmaBeta^2)
  b7 ~ dnorm(0.0, 1/sigmaBeta^2)
  
  # Sigma
  sigma ~ dgamma(0.01, 0.01)
  sigmaBeta ~ dgamma(2.618, 1.618) 
}
"

# MCMC run
# Three chains with independent U(0,1) starting values for every parameter,
# including the shared slope scale sigmaBeta (no nu in this model).
myinits_M1nh2018 <- list (list(b0 = runif(1), b1 = runif(1), b2 = runif(1), b3 = runif(1),
                              b4 = runif(1), b5 = runif(1), b6 = runif(1), b7 = runif(1),
                              sigma = runif(1), sigmaBeta = runif(1)),
                         list(b0 = runif(1), b1 = runif(1), b2 = runif(1), b3 = runif(1),
                              b4 = runif(1), b5 = runif(1), b6 = runif(1), b7 = runif(1),
                              sigma = runif(1), sigmaBeta = runif(1)),
                         list(b0 = runif(1), b1 = runif(1), b2 = runif(1), b3 = runif(1),
                              b4 = runif(1), b5 = runif(1), b6 = runif(1), b7 = runif(1),
                              sigma = runif(1), sigmaBeta = runif(1)))

# Fit M1nh (normal likelihood, hierarchical slope scale); monitors the
# coefficients, sigma, sigmaBeta and the pD/DIC diagnostics.
out_M1nh2018 <- run.jags(model=modelString_M1nh_2018, data=dataList, inits=myinits_M1nh2018,
                        n.chains=3,
                        adapt=500, burnin=500, sample=5000, monitor=c("b0","b1", "b2", "b3", "b4", 
                                                                      "b5","b6", "b7", "sigma", "sigmaBeta","pD", "DIC"))
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## basemod: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 3 is invalid
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## bugs: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 4 is invalid
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## basemod: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 3 is invalid
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## bugs: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 4 is invalid
## Compiling rjags model...
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## basemod: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 3 is invalid
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## bugs: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 4 is invalid
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## basemod: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 3 is invalid
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## bugs: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 4 is invalid
## Calling the simulation using the rjags method...
## Adapting the model for 500 iterations...
## Burning in the model for 500 iterations...
## Running the model for 5000 iterations...
## Extending 5000 iterations for pD/DIC estimates...
## Simulation complete
## Calculating summary statistics...
## Calculating the Gelman-Rubin statistic for 10 variables....
## Finished running the simulation
# Posterior summary: medians, 95% intervals, ESS, psrf, and DIC/pD
print(out_M1nh2018) # pD =9.15726 DIC = -67.58924
## 
## JAGS model summary statistics from 15000 samples (chains = 3; adapt+burnin = 1000):
##                                                                               
##             Lower95       Median  Upper95        Mean       SD Mode      MCerr
## b0          0.45606      0.48854  0.52167     0.48855 0.016699   -- 0.00013749
## b1          -0.0177     0.016158  0.04902    0.016045 0.017118   -- 0.00015476
## b2         -0.16009    -0.050253 0.050333   -0.052418 0.055198   --  0.0020802
## b3         0.057951      0.16269  0.27005     0.16289 0.055181   --  0.0020569
## b4        -0.078232    -0.031576  0.01555   -0.031499 0.023904   -- 0.00041772
## b5        -0.065222     -0.02287 0.019449   -0.022952 0.021542   -- 0.00034067
## b6        -0.025468      0.01093 0.047428    0.010967 0.018532   --  0.0002257
## b7        -0.060429 -0.000031662 0.061337 0.000017734 0.030867   -- 0.00064291
## sigma        0.1204      0.14354   0.1706     0.14452 0.013073   -- 0.00016627
## sigmaBeta  0.025556     0.093353  0.21005     0.10464  0.05402   --  0.0014975
##                                         
##           MC%ofSD SSeff     AC.10   psrf
## b0            0.8 14751 0.0040282      1
## b1            0.9 12234  0.019078      1
## b2            3.8   704   0.40165 1.0003
## b3            3.7   720   0.40255 1.0008
## b4            1.7  3275  0.029559 1.0003
## b5            1.6  3998  0.030308 1.0004
## b6            1.2  6742  0.017737 1.0001
## b7            2.1  2305  0.044081 1.0004
## sigma         1.3  6182  0.019221 1.0024
## sigmaBeta     2.8  1301   0.20904 1.0009
## 
## Model fit assessment:
## DIC = -67.31017
## [PED not available from the stored object]
## Estimated effective number of parameters:  pD = 9.22115
## 
## Total time taken: 2.4 seconds
# Diagnostic plots for the monitored parameters (convergence check)
plot(out_M1nh2018) 
## Generating plots...

## PVAF --------------------------
## Pull chain-1 posterior draws of every M1nh coefficient into plain numeric
## vectors (b0_M1nh ... b7_M1nh, s_M1nh), keeping the raw mcmc.list objects
## under the doubled names (b00_M1nh ... b77_M1nh, sigmas_M1nh).
for (d in 0:7) {
  raw_draws <- as.mcmc.list(out_M1nh2018, vars = paste0("b", d))
  assign(paste0("b", d, d, "_M1nh"), raw_draws)
  assign(paste0("b", d, "_M1nh"), as.numeric(unlist(raw_draws[, 1])))
}

# "sigma" also partial-matches "sigmaBeta"; [, 1] keeps the sigma column
sigmas_M1nh <- as.mcmc.list(out_M1nh2018, vars = "sigma")
s_M1nh <- as.numeric(unlist(sigmas_M1nh[, 1]))


# Posterior Predictive Check (PPC)
# Build an evenly spaced 74-point grid X1..X7 spanning each observed predictor
for (k in 1:7) {
  obs <- get(paste0("x", k))
  assign(paste0("X", k), seq(min(obs), max(obs), length.out = 74))
}

# Number of observations (x1..x7 and y share this length)
nn <- length(y)

par(mfrow = c(4, 2))  # 4 x 2 grid of subplots, one panel per predictor


## PPC panels for M1nh: for each predictor, plot observed (x_k, y), overlay
## one simulated posterior-predictive point per observation (every 10th draw),
## then 20 posterior regression lines (every 400th draw) with per-draw PVAF.
## FIXES: the line-drawing loop reused `i`, shadowing the panel index (renamed
## to `m`); `text`/`pchx` renamed to leg_* (`text` shadowed base::text()).
for (panel in 1:7) {
  # Get x1, x2, ..., x7 dynamically
  x_var <- get(paste0("x", panel))

  # Scatter plot of observed data
  plot(x_var, y, col = "black", pch = 1, cex = 0.8,
       main = paste("Plot for x", panel, sep = ""),
       xlab = "x values", ylab = "y")

  # PPC-simulated y: regression mean plus Gaussian noise, thinned every 10th draw
  y_ppc <- rep(0, nn)
  for (j in 1:nn) {
    y_ppc[j] <- b0_M1nh[j*10] +
      b1_M1nh[j*10] * x1[j] +
      b2_M1nh[j*10] * x2[j] +
      b3_M1nh[j*10] * x3[j] +
      b4_M1nh[j*10] * x4[j] +
      b5_M1nh[j*10] * x5[j] +
      b6_M1nh[j*10] * x6[j] +
      b7_M1nh[j*10] * x7[j] + rnorm(1, 0, s_M1nh[j*10])
  }

  # Add PPC points and legend
  points(x_var, y_ppc, col = "red", cex = 0.8, pch = 18)
  leg_text <- c("Observed", "PPC-simulated")
  leg_pch <- c(1, 18)
  legend("topright", legend = leg_text,
         pch = leg_pch, col = c("black", "red"), cex = 0.8)

  # Posterior predictive regression lines (every 400th draw) + per-draw PVAF
  mm <- 20
  pvaf_M1nh <- numeric(mm)
  X_var <- get(paste0("X", panel))
  for (m in 1:mm) {
    # Regression line evaluated on the grid
    ppd1 <- b0_M1nh[m*400] +
      b1_M1nh[m*400] * X1 +
      b2_M1nh[m*400] * X2 +
      b3_M1nh[m*400] * X3 +
      b4_M1nh[m*400] * X4 +
      b5_M1nh[m*400] * X5 +
      b6_M1nh[m*400] * X6 +
      b7_M1nh[m*400] * X7

    # Fitted values at the observed predictors
    y.prd <- b0_M1nh[m*400] +
      b1_M1nh[m*400] * x1 +
      b2_M1nh[m*400] * x2 +
      b3_M1nh[m*400] * x3 +
      b4_M1nh[m*400] * x4 +
      b5_M1nh[m*400] * x5 +
      b6_M1nh[m*400] * x6 +
      b7_M1nh[m*400] * x7

    # Proportion of variance accounted for by this posterior draw
    pvaf_M1nh[m] <- 1 - sum((y - y.prd)^2) / sum((y - mean(y))^2)
    lines(X_var, ppd1, lwd = 1, col = "red")
  }
}

# Back to a single plotting panel
par(mfrow = c(1, 1))

# PVAF across 20 thinned posterior draws (every 400th sample)
mm <- 20
pvaf_M1nh <- rep(0, mm)
for (m in 1:mm) {
  idx <- m * 400
  # Regression line on the grid (computed for parity with the panel loop; unused here)
  ppd1 <- b0_M1nh[idx] +
    b1_M1nh[idx] * X1 +
    b2_M1nh[idx] * X2 +
    b3_M1nh[idx] * X3 +
    b4_M1nh[idx] * X4 +
    b5_M1nh[idx] * X5 +
    b6_M1nh[idx] * X6 +
    b7_M1nh[idx] * X7

  # Fitted values at the observed predictors
  y.prd <- b0_M1nh[idx] +
    b1_M1nh[idx] * x1 +
    b2_M1nh[idx] * x2 +
    b3_M1nh[idx] * x3 +
    b4_M1nh[idx] * x4 +
    b5_M1nh[idx] * x5 +
    b6_M1nh[idx] * x6 +
    b7_M1nh[idx] * x7

  pvaf_M1nh[m] <- 1 - sum((y - y.prd)^2) / sum((y - mean(y))^2)
}

# Display PVAF (knitted run gave 0.376; earlier run gave 0.386)
cat('Mean Percent Variance Accounted For (PVAF):', mean(pvaf_M1nh), "\n") # 0.3857553 
## Mean Percent Variance Accounted For (PVAF): 0.3758539
# Residual normality check at the posterior-mean coefficients
# (residu = fitted - y, so signs are flipped relative to the usual convention)
residu <- mean(b0_M1nh) + mean(b1_M1nh)*x1 + mean(b2_M1nh)*x2 + mean(b3_M1nh)*x3 + 
  mean(b4_M1nh)*x4 + mean(b5_M1nh)*x5 + mean(b6_M1nh)*x6 + mean(b7_M1nh)*x7 - y
qqnorm(residu) ; qqline(residu)

hist(residu, breaks = 11)

# M1xh: robust regression with a Student-t likelihood (df `nu` estimated,
# dexp(0.0333) prior ~ mean 30), Cauchy (t with df = 1) shrinkage priors on
# the slopes sharing scale `sigmaBeta`, plus an x2:x3 interaction term (b23).
# NOTE(review): `sigma` is used as a standard deviation (1/sigma^2 precision)
# but carries a dgamma(0.01, 0.01) prior, a shape usually chosen for
# precisions -- confirm this is intentional.
modelString_M1xh = "
  model{
    for (i in 1:Ntotal){
      y[i] ~ dt(b0 + 
      b1*x1[i] + b2*x2[i] + b3*x3[i] + b4*x4[i] +
      b5*x5[i] + b6*x6[i] + b7*x7[i] + b23*x2[i]*x3[i], 1/sigma^2, nu)
    }
      
    # priors vague 
    b0 ~ dnorm(0, 1/2^2)
    b1 ~ dt(0.0, 1/sigmaBeta^2, 1)
    b2 ~ dt(0.0, 1/sigmaBeta^2, 1)
    b3 ~ dt(0.0, 1/sigmaBeta^2, 1)
    b4 ~ dt(0.0, 1/sigmaBeta^2, 1)
    b5 ~ dt(0.0, 1/sigmaBeta^2, 1)
    b6 ~ dt(0.0, 1/sigmaBeta^2, 1)
    b7 ~ dt(0.0, 1/sigmaBeta^2, 1)
    b23 ~ dnorm(0, 1/2^2)
    # Sigma
    sigma ~ dgamma(0.01, 0.01)
    sigmaBeta ~ dgamma(2.618, 1.618)
    nu ~ dexp(0.0333)

    }
"
# MCMC run
# Random starting values for each of the three chains: every parameter gets
# an independent Uniform(0, 1) draw (same draw order as writing the three
# lists out explicitly, so results match under a fixed seed).
myinits_M1xh <- lapply(1:3, function(chain) {
  list(b0 = runif(1), b1 = runif(1), b2 = runif(1), b3 = runif(1),
       b4 = runif(1), b5 = runif(1), b6 = runif(1), b7 = runif(1),
       b23 = runif(1), sigma = runif(1), sigmaBeta = runif(1))
})

# Fit M1xh (robust-t likelihood with x2:x3 interaction): 3 chains,
# 500 adapt + 500 burn-in, 5000 retained samples; pD/DIC monitored.
out_M1xh <- run.jags (model=modelString_M1xh, data=dataList, inits=myinits_M1xh,
                        n.chains=3,
                        adapt=500, burnin=500, sample=5000, monitor=c("b0","b1", "b2", "b3", "b4", 
                                                                      "b5","b6", "b7", "b23", "sigma", "sigmaBeta", "nu", "pD", "DIC"))
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## basemod: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 3 is invalid
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## bugs: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 4 is invalid
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## basemod: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 3 is invalid
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## bugs: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 4 is invalid
## Compiling rjags model...
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## basemod: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 3 is invalid
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## bugs: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 4 is invalid
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## basemod: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 3 is invalid
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## bugs: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 4 is invalid
## Calling the simulation using the rjags method...
## Adapting the model for 500 iterations...
## Burning in the model for 500 iterations...
## Running the model for 5000 iterations...
## Extending 5000 iterations for pD/DIC estimates...
## Simulation complete
## Calculating summary statistics...
## Calculating the Gelman-Rubin statistic for 12 variables....
## Finished running the simulation
# Posterior summary for M1xh (this run: DIC = -66.66, pD = 9.30)
print(out_M1xh) 
## 
## JAGS model summary statistics from 15000 samples (chains = 3; adapt+burnin = 1000):
##                                                                            
##             Lower95     Median  Upper95       Mean       SD Mode      MCerr
## b0          0.45646     0.4931  0.53062    0.49296 0.018968   -- 0.00025292
## b1        -0.020254  0.0094034 0.043077   0.010285 0.016099   -- 0.00020267
## b2         -0.16249  -0.044366 0.032738   -0.05168 0.051172   --  0.0022348
## b3         0.075902     0.1678  0.27556    0.17115 0.051385   --  0.0021194
## b4        -0.075401  -0.028443 0.011168  -0.029431 0.022374   --  0.0004565
## b5        -0.055116   -0.01159 0.024578  -0.012991 0.020342   -- 0.00036406
## b6        -0.025014  0.0068015 0.041612  0.0073254 0.016752   --  0.0002413
## b7        -0.057299 -0.0034263 0.047654 -0.0040191 0.025816   --  0.0005964
## b23       -0.026019 -0.0064748 0.013655 -0.0062453  0.01022   -- 0.00014929
## sigma       0.11288     0.1381  0.16822    0.13876 0.013877   -- 0.00019233
## sigmaBeta 0.0040058     0.0461  0.14495   0.059559 0.052725   --  0.0014445
## nu           2.6346     29.328   101.81     38.698   31.835   --    0.62214
##                                          
##           MC%ofSD SSeff     AC.10    psrf
## b0            1.3  5625 -0.006073   1.001
## b1            1.3  6310  0.016802 0.99996
## b2            4.4   524   0.51208  1.0049
## b3            4.1   588   0.48106  1.0055
## b4              2  2402  0.074353  1.0006
## b5            1.8  3122  0.064049  1.0015
## b6            1.4  4820  0.032571  1.0003
## b7            2.3  1874   0.11893  1.0014
## b23           1.5  4687 0.0075331  1.0006
## sigma         1.4  5206  0.011825  1.0002
## sigmaBeta     2.7  1332   0.25316   1.005
## nu              2  2618  0.039911  1.0001
## 
## Model fit assessment:
## DIC = -66.65673
## [PED not available from the stored object]
## Estimated effective number of parameters:  pD = 9.2968
## 
## Total time taken: 54.3 seconds
# Diagnostic plots for the monitored parameters (convergence check)
plot(out_M1xh) 
## Generating plots...

## PVAF --------------------------
## Extract chain-1 posterior draws for the M1xh model into numeric vectors.
## BUG FIX: b2 was extracted with vars = "b", which partial-matches every
## b* monitor and returned the wrong column (b0 draws ended up in b2_M1xh,
## corrupting the PVAF/residual computations below); use vars = "b2".
## NOTE(review): vars = "b2" may also partial-match "b23"; [, 1] keeps the
## first matched column (b2, per monitor order) -- confirm against the fit.
for (d in 0:7) {
  raw_draws <- as.mcmc.list(out_M1xh, vars = paste0("b", d))
  assign(paste0("b", d, d, "_M1xh"), raw_draws)
  assign(paste0("b", d, "_M1xh"), as.numeric(unlist(raw_draws[, 1])))
}
b233_M1xh <- as.mcmc.list(out_M1xh, vars = "b23")
b23_M1xh <- as.numeric(unlist(b233_M1xh[, 1]))

# "sigma" also partial-matches "sigmaBeta"; [, 1] keeps the sigma column
sigmas_M1xh <- as.mcmc.list(out_M1xh, vars = "sigma")
s_M1xh <- as.numeric(unlist(sigmas_M1xh[, 1]))
nus_M1xh <- as.mcmc.list(out_M1xh, vars = "nu")
nu_M1xh <- as.numeric(unlist(nus_M1xh[, 1]))

# Posterior Predictive Check (PPC)
# Evenly spaced 74-point grids X1..X7 spanning each observed predictor
for (k in 1:7) {
  obs <- get(paste0("x", k))
  assign(paste0("X", k), seq(min(obs), max(obs), length.out = 74))
}

# Number of observations (x1..x7 and y share this length)
nn <- length(y)

par(mfrow = c(4, 2))  # 4 x 2 grid of subplots, one panel per predictor

## PPC panels for M1xh: observed (x_k, y), simulated posterior-predictive
## points (Student-t noise with posterior nu, every 10th draw), and 20
## posterior regression lines (every 400th draw) with per-draw PVAF.
## FIXES: the line-drawing loop reused `i`, shadowing the panel index
## (renamed to `m`); `text`/`pchx` renamed to leg_* (`text` shadowed
## base::text()).
for (panel in 1:7) {
  # Get x1, x2, ..., x7 dynamically
  x_var <- get(paste0("x", panel))
  
  # Scatter plot of observed data
  plot(x_var, y, col = "black", pch = 1, cex = 0.8,
       main = paste("Plot for x", panel, sep = ""),
       xlab = "x values", ylab = "y")
  
  # PPC-simulated y: regression mean + scaled t-noise (df = posterior nu draw)
  y_ppc <- rep(0, nn)
  for (j in 1:nn) {
    y_ppc[j] <- b0_M1xh[j*10] +
      b1_M1xh[j*10] * x1[j] +
      b2_M1xh[j*10] * x2[j] +
      b3_M1xh[j*10] * x3[j] +
      b4_M1xh[j*10] * x4[j] +
      b5_M1xh[j*10] * x5[j] +
      b6_M1xh[j*10] * x6[j] +
      b7_M1xh[j*10] * x7[j] + b23_M1xh[j*10]*x2[j]*x3[j] + s_M1xh[j*10]*rt(1, nu_M1xh[j*10]) 
  }
  
  # Add PPC points and legend
  points(x_var, y_ppc, col = "red", cex = 0.8, pch = 18)
  leg_text <- c("Observed", "PPC-simulated")
  leg_pch <- c(1, 18)
  legend("topright", legend = leg_text,
         pch = leg_pch, col = c("black", "red"), cex = 0.8)
  
  # Posterior predictive regression lines (every 400th draw) + per-draw PVAF
  mm <- 20
  pvaf_M1 <- numeric(mm)
  X_var <- get(paste0("X", panel))
  for (m in 1:mm) {
    # Regression line evaluated on the grid
    ppd1 <- b0_M1xh[m*400] + 
      b1_M1xh[m*400] * X1 + 
      b2_M1xh[m*400] * X2 +
      b3_M1xh[m*400] * X3 +
      b4_M1xh[m*400] * X4 +
      b5_M1xh[m*400] * X5 +
      b6_M1xh[m*400] * X6 +
      b7_M1xh[m*400] * X7 + b23_M1xh[m*400]*X2*X3
    
    # Fitted values at the observed predictors
    y.prd <- b0_M1xh[m*400] +
      b1_M1xh[m*400] * x1 +
      b2_M1xh[m*400] * x2 +
      b3_M1xh[m*400] * x3 +
      b4_M1xh[m*400] * x4 +
      b5_M1xh[m*400] * x5 +
      b6_M1xh[m*400] * x6 +
      b7_M1xh[m*400] * x7 + b23_M1xh[m*400]*x2*x3
    
    pvaf_M1[m] <- 1 - sum((y - y.prd)^2) / sum((y - mean(y))^2)
    lines(X_var, ppd1, lwd = 1, col = "red")
  }
}

# Reset layout to default
par(mfrow = c(1, 1))

# PVAF for M1xh over 20 thinned posterior draws (every 400th sample).
# BUG FIX: the interaction term of ppd1 indexed b23_M1xh[i*100] while every
# other coefficient used [i*400]; aligned so all terms use the same draw.
mm <- 20
pvaf_M1x <- rep(0, mm)
for (m in 1:mm) {
  # Regression line on the grid (computed for parity with the panel loop; unused here)
  ppd1 <- b0_M1xh[m*400] + 
    b1_M1xh[m*400] * X1 + 
    b2_M1xh[m*400] * X2 +
    b3_M1xh[m*400] * X3 +
    b4_M1xh[m*400] * X4 +
    b5_M1xh[m*400] * X5 +
    b6_M1xh[m*400] * X6 +
    b7_M1xh[m*400] * X7 + b23_M1xh[m*400]*X2*X3
  
  # Fitted values at the observed predictors
  y.prd <- b0_M1xh[m*400] +
    b1_M1xh[m*400] * x1 +
    b2_M1xh[m*400] * x2 +
    b3_M1xh[m*400] * x3 +
    b4_M1xh[m*400] * x4 +
    b5_M1xh[m*400] * x5 +
    b6_M1xh[m*400] * x6 +
    b7_M1xh[m*400] * x7 + b23_M1xh[m*400]*x2*x3
  
  pvaf_M1x[m] <- 1 - sum((y - y.prd)^2) / sum((y - mean(y))^2)
}

# Display PVAF
# NOTE(review): the knitted value (-8.87) is distorted by the vars = "b"
# extraction of b2 upstream (b2_M1xh actually held b0 draws); rerun after
# fixing that extraction to refresh this number.
cat('Mean Percent Variance Accounted For (PVAF):', mean(pvaf_M1x), "\n") #-0.1799237 
## Mean Percent Variance Accounted For (PVAF): -8.874511
# Residual normality check at the posterior-mean coefficients
# (residu = fitted - y, so signs are flipped relative to the usual convention)
residu <- mean(b0_M1xh) + mean(b1_M1xh)*x1 + mean(b2_M1xh)*x2 + mean(b3_M1xh)*x3 + 
  mean(b4_M1xh)*x4 + mean(b5_M1xh)*x5 + mean(b6_M1xh)*x6 + mean(b7_M1xh)*x7 + mean(b23_M1xh)*x2*x3 - y
qqnorm(residu) ; qqline(residu)

hist(residu, breaks = 11)

# Define the model with no individual differences
# M1xnh: normal likelihood with the x2:x3 interaction (b23); the slopes get
# normal shrinkage priors sharing a common scale `sigmaBeta` (rather than the
# Cauchy priors of M1xh), and there is no t-likelihood `nu` parameter.
# NOTE(review): as in M1xh, `sigma` is a standard deviation with a
# dgamma(0.01, 0.01) prior -- confirm intent.
modelString_M1xnh = "
  model{
    for (i in 1:Ntotal){
      y[i] ~ dnorm(b0 + 
      b1*x1[i] + b2*x2[i] + b3*x3[i] + b4*x4[i] +
      b5*x5[i] + b6*x6[i] + b7*x7[i] + b23*x2[i]*x3[i], 1/sigma^2)
    }
      
    # priors vague 
    b0 ~ dnorm(0, 1/2^2)
    b1 ~ dnorm(0.0, 1/sigmaBeta^2)
    b2 ~ dnorm(0.0, 1/sigmaBeta^2)
    b3 ~ dnorm(0.0, 1/sigmaBeta^2)
    b4 ~ dnorm(0.0, 1/sigmaBeta^2)
    b5 ~ dnorm(0.0, 1/sigmaBeta^2)
    b6 ~ dnorm(0.0, 1/sigmaBeta^2)
    b7 ~ dnorm(0.0, 1/sigmaBeta^2)
    b23 ~ dnorm(0, 1/2^2)
    
    # Sigma
    sigma ~ dgamma(0.01, 0.01)
    sigmaBeta ~ dgamma(2.618, 1.618)
    
    }
"
# MCMC run
# Random starting values for each of the three chains: every parameter gets
# an independent Uniform(0, 1) draw (same draw order as writing the three
# lists out explicitly, so results match under a fixed seed).
myinits_M1xnh <- lapply(1:3, function(chain) {
  list(b0 = runif(1), b1 = runif(1), b2 = runif(1), b3 = runif(1),
       b4 = runif(1), b5 = runif(1), b6 = runif(1), b7 = runif(1),
       b23 = runif(1), sigma = runif(1), sigmaBeta = runif(1))
})

# Fit M1xnh (normal likelihood with x2:x3 interaction): 3 chains,
# 500 adapt + 500 burn-in, 5000 retained samples; pD/DIC monitored.
out_M1xnh <- run.jags (model=modelString_M1xnh, data=dataList, inits=myinits_M1xnh,
                         n.chains=3,
                         adapt=500, burnin=500, sample=5000, monitor=c("b0","b1", "b2", "b3", "b4", 
                                                                       "b5","b6", "b7", "b23", "sigma", "sigmaBeta", "pD", "DIC"))
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## basemod: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 3 is invalid
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## bugs: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 4 is invalid
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## basemod: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 3 is invalid
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## bugs: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 4 is invalid
## Compiling rjags model...
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## basemod: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 3 is invalid
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## bugs: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 4 is invalid
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## basemod: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 3 is invalid
## Warning in grepl("Welcome", returnval): unable to translate 'Loading module:
## bugs: <c1><f6><c1><a4><b5><c8> <b8><f0><b5><e2><c0><bb> ã<c0><bb> <bc><f6>
## <be><f8><bd><c0><b4>ϴ<d9>.' to a wide string
## Warning in grepl("Welcome", returnval): input string 4 is invalid
## Calling the simulation using the rjags method...
## Adapting the model for 500 iterations...
## Burning in the model for 500 iterations...
## Running the model for 5000 iterations...
## Extending 5000 iterations for pD/DIC estimates...
## Simulation complete
## Calculating summary statistics...
## Calculating the Gelman-Rubin statistic for 11 variables....
## Finished running the simulation
# Posterior summary for M1xnh (this run: DIC = -65.66, pD = 10.12)
print(out_M1xnh) # pD = 9.3521, -68.97304
## 
## JAGS model summary statistics from 15000 samples (chains = 3; adapt+burnin = 1000):
##                                                                             
##             Lower95     Median  Upper95        Mean       SD Mode      MCerr
## b0          0.45526    0.49325  0.53211     0.49334 0.019543   -- 0.00021949
## b1        -0.021333   0.014375 0.048228    0.014427 0.017661   -- 0.00016912
## b2         -0.15695  -0.046937 0.052507   -0.049673 0.054912   --  0.0021021
## b3         0.058809    0.16219  0.27471     0.16227 0.056417   --    0.00211
## b4        -0.080284  -0.032246 0.014963   -0.032469 0.024385   --  0.0004284
## b5        -0.061817  -0.019114 0.026245   -0.019141 0.022511   -- 0.00037996
## b6        -0.027346  0.0096926  0.04581   0.0094809 0.018651   -- 0.00021933
## b7        -0.064149 -0.0004259 0.059918 -0.00050996 0.031333   -- 0.00066974
## b23        -0.02558 -0.0052664 0.014982  -0.0053319 0.010388   -- 0.00012766
## sigma       0.12079    0.14464   0.1725      0.1456 0.013332   -- 0.00019137
## sigmaBeta  0.023763   0.093074  0.22292     0.10708 0.062574   --  0.0019749
##                                          
##           MC%ofSD SSeff     AC.10    psrf
## b0            1.1  7928 0.0015668 0.99995
## b1              1 10906 0.0016474 0.99991
## b2            3.8   682    0.3978  1.0022
## b3            3.7   715   0.39375  1.0019
## b4            1.8  3240  0.030784  1.0005
## b5            1.7  3510  0.052006  1.0002
## b6            1.2  7231  0.022453  1.0001
## b7            2.1  2189  0.036398  1.0007
## b23           1.2  6621 0.0097968  0.9999
## sigma         1.4  4853  0.028764  1.0001
## sigmaBeta     3.2  1004   0.27895  1.0027
## 
## Model fit assessment:
## DIC = -65.65599
## [PED not available from the stored object]
## Estimated effective number of parameters:  pD = 10.11587
## 
## Total time taken: 2.4 seconds
# Diagnostic plots for the monitored parameters (convergence check)
plot(out_M1xnh) 
## Generating plots...

## PVAF --------------------------
## Extract chain-1 posterior draws for the M1xnh model into numeric vectors.
## BUG FIX: b2 was extracted with vars = "b", which partial-matches every
## b* monitor and returned the wrong column (b0 draws ended up in b2_M1xnh,
## corrupting the PVAF/residual computations below); use vars = "b2".
## NOTE(review): vars = "b2" may also partial-match "b23"; [, 1] keeps the
## first matched column (b2, per monitor order) -- confirm against the fit.
for (d in 0:7) {
  raw_draws <- as.mcmc.list(out_M1xnh, vars = paste0("b", d))
  assign(paste0("b", d, d, "_M1xnh"), raw_draws)
  assign(paste0("b", d, "_M1xnh"), as.numeric(unlist(raw_draws[, 1])))
}
b233_M1xnh <- as.mcmc.list(out_M1xnh, vars = "b23")
b23_M1xnh <- as.numeric(unlist(b233_M1xnh[, 1]))

# "sigma" also partial-matches "sigmaBeta"; [, 1] keeps the sigma column
sigmas_M1xnh <- as.mcmc.list(out_M1xnh, vars = "sigma")
s_M1xnh <- as.numeric(unlist(sigmas_M1xnh[, 1]))


# Posterior Predictive Check (PPC)
# Evenly spaced 74-point grids X1..X7 spanning each observed predictor
for (k in 1:7) {
  obs <- get(paste0("x", k))
  assign(paste0("X", k), seq(min(obs), max(obs), length.out = 74))
}

# Number of observations (x1..x7 and y share this length)
nn <- length(y)

par(mfrow = c(4, 2))  # 4 x 2 grid of subplots, one panel per predictor


## PPC panels for M1xnh: observed (x_k, y), simulated posterior-predictive
## points (Gaussian noise, every 10th draw), and 20 posterior regression
## lines (every 400th draw) with per-draw PVAF.
## BUG FIXES: (1) the line-drawing loop reused `i`, shadowing the panel index
## (renamed to `m`); (2) y.prd mixed the 74-point plotting grid into the
## interaction term (b23*X2*X3) -- fitted values at the observed data must
## use x2*x3. Also renamed `text`/`pchx` (`text` shadowed base::text()).
for (panel in 1:7) {
  # Get x1, x2, ..., x7 dynamically
  x_var <- get(paste0("x", panel))
  
  # Scatter plot of observed data
  plot(x_var, y, col = "black", pch = 1, cex = 0.8,
       main = paste("Plot for x", panel, sep = ""),
       xlab = "x values", ylab = "y")
  
  # PPC-simulated y: regression mean + Gaussian noise, every 10th draw
  y_ppc <- rep(0, nn)
  for (j in 1:nn) {
    y_ppc[j] <- b0_M1xnh[j*10] +
      b1_M1xnh[j*10] * x1[j] +
      b2_M1xnh[j*10] * x2[j] +
      b3_M1xnh[j*10] * x3[j] +
      b4_M1xnh[j*10] * x4[j] +
      b5_M1xnh[j*10] * x5[j] +
      b6_M1xnh[j*10] * x6[j] +
      b7_M1xnh[j*10] * x7[j] + b23_M1xnh[j*10]*x2[j]*x3[j] + rnorm(1, 0, s_M1xnh[j*10]) 
  }
  
  # Add PPC points and legend
  points(x_var, y_ppc, col = "red", cex = 0.8, pch = 18)
  leg_text <- c("Observed", "PPC-simulated")
  leg_pch <- c(1, 18)
  legend("topright", legend = leg_text,
         pch = leg_pch, col = c("black", "red"), cex = 0.8)
  
  # Posterior predictive regression lines (every 400th draw) + per-draw PVAF
  mm <- 20
  pvaf_M1xnh <- numeric(mm)
  X_var <- get(paste0("X", panel))
  for (m in 1:mm) {
    # NOTE(review): the rnorm() term shifts the whole line by a single noise
    # draw; the M1nh/M1xh panels draw the pure regression line -- confirm intent.
    ppd1 <- b0_M1xnh[m*400] + 
      b1_M1xnh[m*400] * X1 + 
      b2_M1xnh[m*400] * X2 +
      b3_M1xnh[m*400] * X3 +
      b4_M1xnh[m*400] * X4 +
      b5_M1xnh[m*400] * X5 +
      b6_M1xnh[m*400] * X6 +
      b7_M1xnh[m*400] * X7 + 
      b23_M1xnh[m*400]*X2*X3 +
      rnorm(1, 0, s_M1xnh[m*400])
    
    # Fitted values at the observed predictors (BUG FIX: x2*x3, not X2*X3)
    y.prd <- b0_M1xnh[m*400] + 
      b1_M1xnh[m*400] * x1 + 
      b2_M1xnh[m*400] * x2 +
      b3_M1xnh[m*400] * x3 +
      b4_M1xnh[m*400] * x4 +
      b5_M1xnh[m*400] * x5 +
      b6_M1xnh[m*400] * x6 +
      b7_M1xnh[m*400] * x7 + 
      b23_M1xnh[m*400]*x2*x3
    
    pvaf_M1xnh[m] <- 1 - sum((y - y.prd)^2) / sum((y - mean(y))^2)
    lines(X_var, ppd1, lwd = 1, col = "red")
  }
}

# Reset layout to default
par(mfrow = c(1, 1))

# PVAF for M1xnh over 20 thinned posterior draws (every 400th sample).
# BUG FIX: y.prd used the 74-point plotting grid in the interaction term
# (b23*X2*X3); fitted values at the observed data must use x2*x3.
mm <- 20
pvaf_M1xnh <- rep(0, mm)
for (m in 1:mm) {
  # Grid-line draw (computed for parity with the panel loop; not plotted here).
  # NOTE(review): includes a single rnorm() offset -- confirm intent.
  ppd1 <- b0_M1xnh[m*400] + 
    b1_M1xnh[m*400] * X1 + 
    b2_M1xnh[m*400] * X2 +
    b3_M1xnh[m*400] * X3 +
    b4_M1xnh[m*400] * X4 +
    b5_M1xnh[m*400] * X5 +
    b6_M1xnh[m*400] * X6 +
    b7_M1xnh[m*400] * X7 +
    b23_M1xnh[m*400]*X2*X3 +
    rnorm(1, 0, s_M1xnh[m*400])
  
  # Fitted values at the observed predictors
  y.prd <- b0_M1xnh[m*400] + 
    b1_M1xnh[m*400] * x1 + 
    b2_M1xnh[m*400] * x2 +
    b3_M1xnh[m*400] * x3 +
    b4_M1xnh[m*400] * x4 +
    b5_M1xnh[m*400] * x5 +
    b6_M1xnh[m*400] * x6 +
    b7_M1xnh[m*400] * x7 +
    b23_M1xnh[m*400]*x2*x3
  
  pvaf_M1xnh[m] <- 1 - sum((y - y.prd)^2) / sum((y - mean(y))^2)
}

# Display PVAF
# NOTE(review): the knitted value (-9.09) is distorted by two upstream bugs:
# b2 was extracted with vars = "b" (wrong column), and y.prd mixed the
# plotting grid (X2*X3) into the interaction term; rerun after those fixes.
cat('Mean Percent Variance Accounted For (PVAF):', mean(pvaf_M1xnh), "\n") # 0.3857553 
## Mean Percent Variance Accounted For (PVAF): -9.092074
# Residual normality check at the posterior-mean coefficients
# (residu = fitted - y, so signs are flipped relative to the usual convention)
residu <- mean(b0_M1xnh) + mean(b1_M1xnh)*x1 + mean(b2_M1xnh)*x2 + mean(b3_M1xnh)*x3 + 
  mean(b4_M1xnh)*x4 + mean(b5_M1xnh)*x5 + mean(b6_M1xnh)*x6 + mean(b7_M1xnh)*x7 + mean(b23_M1xnh)*x2*x3- y
qqnorm(residu) ; qqline(residu)

hist(residu, breaks = 11)